From e24e200a8d3ccb877ed11184cbd02b1eb28f8b50 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Thu, 9 Jan 2025 13:39:13 +0530 Subject: [PATCH 01/60] SK-1772: Beta Release for Python SDK V2 (#151) * SK-1772: Beta Release for Python SDK V2 --- .github/workflows/beta-release.yml | 18 + .github/workflows/ci.yml | 47 +- .github/workflows/internal-release.yml | 23 + .github/workflows/main.yml | 47 +- .github/workflows/release.yml | 47 +- .github/workflows/shared-build-and-deploy.yml | 84 + .github/workflows/shared-tests.yml | 41 + README.md | 1495 ++++---- ci-scripts/bump_version.sh | 26 +- requirements.txt | 11 + samples/README.md | 211 -- samples/delete_sample.py | 40 - samples/detokenize_sample.py | 41 - ...etokenize_with_continue_on_error_sample.py | 54 - ...generate_bearer_token_from_creds_sample.py | 45 - samples/get_by_ids_sample.py | 36 - samples/get_sample.py | 45 - samples/get_with_options.py | 34 - samples/insert_sample.py | 38 - samples/insert_upsert_sample.py | 39 - .../insert_with_continue_on_error_sample.py | 44 - samples/invoke_connection_sample.py | 44 - samples/query_sample.py | 35 - samples/sa_token_sample.py | 26 - .../scoped_token_generation_example.py | 56 + .../signed_token_generation_example.py | 59 + .../token_generation_example.py | 55 + .../token_generation_with_context_example.py | 55 + samples/update_sample.py | 39 - samples/vault_api/client_operations.py | 92 + samples/vault_api/credentials_options.py | 94 + samples/vault_api/delete_records.py | 80 + samples/vault_api/detokenize_records.py | 82 + samples/vault_api/get_column_values.py | 86 + samples/vault_api/get_records.py | 71 + samples/vault_api/insert_byot.py | 99 + samples/vault_api/insert_records.py | 73 + samples/vault_api/invoke_connection.py | 85 + samples/vault_api/query_records.py | 76 + samples/vault_api/tokenize_records.py | 80 + samples/vault_api/update_record.py | 68 + setup.py | 28 +- skyflow/__init__.py | 6 +- skyflow/_utils.py | 166 - skyflow/client/__init__.py | 1 + skyflow/client/skyflow.py | 237 ++ skyflow/error/__init__.py | 1 + skyflow/error/_skyflow_error.py | 19 + skyflow/errors/__init__.py | 5 - skyflow/errors/_skyflow_errors.py | 120 - .../generated/__init__.py | 0 skyflow/generated/rest/__init__.py | 88 + skyflow/generated/rest/api/__init__.py | 9 + skyflow/generated/rest/api/audit_api.py | 848 +++++ .../generated/rest/api/authentication_api.py | 319 ++ skyflow/generated/rest/api/bin_lookup_api.py | 315 ++ skyflow/generated/rest/api/query_api.py | 330 ++ skyflow/generated/rest/api/records_api.py | 3310 +++++++++++++++++ skyflow/generated/rest/api/tokens_api.py | 623 ++++ skyflow/generated/rest/api_client.py | 789 ++++ skyflow/generated/rest/api_response.py | 21 + skyflow/generated/rest/configuration.py | 464 +++ skyflow/generated/rest/exceptions.py | 200 + skyflow/generated/rest/models/__init__.py | 70 + .../models/audit_event_audit_resource_type.py | 66 + .../rest/models/audit_event_context.py | 113 + .../generated/rest/models/audit_event_data.py | 88 + .../rest/models/audit_event_http_info.py | 90 + .../rest/models/batch_record_method.py | 41 + .../rest/models/context_access_type.py | 39 + .../rest/models/context_auth_mode.py | 40 + .../detokenize_record_response_value_type.py | 45 + .../generated/rest/models/googlerpc_status.py | 100 + skyflow/generated/rest/models/protobuf_any.py | 101 + .../query_service_execute_query_body.py | 88 + .../record_service_batch_operation_body.py | 101 + 
.../record_service_bulk_delete_record_body.py | 88 + .../record_service_insert_record_body.py | 105 + .../record_service_update_record_body.py | 97 + .../rest/models/redaction_enum_redaction.py | 40 + .../rest/models/request_action_type.py | 54 + .../rest/models/v1_audit_after_options.py | 90 + .../rest/models/v1_audit_event_response.py | 98 + .../rest/models/v1_audit_response.py | 102 + .../rest/models/v1_audit_response_event.py | 110 + .../models/v1_audit_response_event_request.py | 114 + .../models/v1_batch_operation_response.py | 90 + .../generated/rest/models/v1_batch_record.py | 108 + .../rest/models/v1_bin_list_request.py | 98 + .../rest/models/v1_bin_list_response.py | 96 + .../models/v1_bulk_delete_record_response.py | 88 + .../models/v1_bulk_get_record_response.py | 96 + skyflow/generated/rest/models/v1_byot.py | 39 + skyflow/generated/rest/models/v1_card.py | 104 + .../rest/models/v1_delete_file_response.py | 90 + .../rest/models/v1_delete_record_response.py | 90 + .../rest/models/v1_detokenize_payload.py | 100 + .../models/v1_detokenize_record_request.py | 91 + .../models/v1_detokenize_record_response.py | 95 + .../rest/models/v1_detokenize_response.py | 96 + .../generated/rest/models/v1_field_records.py | 90 + .../rest/models/v1_file_av_scan_status.py | 45 + .../rest/models/v1_get_auth_token_request.py | 98 + .../rest/models/v1_get_auth_token_response.py | 90 + .../v1_get_file_scan_status_response.py | 89 + .../rest/models/v1_get_query_response.py | 96 + .../rest/models/v1_insert_record_response.py | 96 + .../generated/rest/models/v1_member_type.py | 39 + .../rest/models/v1_record_meta_properties.py | 90 + .../rest/models/v1_tokenize_payload.py | 96 + .../rest/models/v1_tokenize_record_request.py | 90 + .../models/v1_tokenize_record_response.py | 88 + .../rest/models/v1_tokenize_response.py | 96 + .../rest/models/v1_update_record_response.py | 90 + .../rest/models/v1_vault_field_mapping.py | 92 + .../rest/models/v1_vault_schema_config.py | 96 + skyflow/generated/rest/py.typed | 0 skyflow/generated/rest/rest.py | 258 ++ skyflow/service_account/__init__.py | 9 +- skyflow/service_account/_token.py | 179 - skyflow/service_account/_utils.py | 178 + skyflow/service_account/_validity.py | 33 - skyflow/service_account/client/__init__.py | 0 skyflow/service_account/client/auth_client.py | 18 + skyflow/utils/__init__.py | 5 + skyflow/utils/_helpers.py | 11 + skyflow/utils/_skyflow_messages.py | 295 ++ skyflow/utils/_utils.py | 392 ++ skyflow/utils/_version.py | 1 + skyflow/utils/enums/__init__.py | 6 + skyflow/utils/enums/content_types.py | 8 + skyflow/utils/enums/env.py | 13 + skyflow/utils/enums/log_level.py | 8 + skyflow/utils/enums/redaction_type.py | 8 + skyflow/utils/enums/request_method.py | 8 + skyflow/utils/enums/token_mode.py | 7 + skyflow/utils/logger/__init__.py | 2 + skyflow/utils/logger/_log_helpers.py | 34 + skyflow/utils/logger/_logger.py | 50 + skyflow/utils/validations/__init__.py | 16 + skyflow/utils/validations/_validations.py | 561 +++ skyflow/vault/__init__.py | 5 - skyflow/vault/_client.py | 283 -- skyflow/vault/_config.py | 89 - skyflow/vault/_connection.py | 116 - skyflow/vault/_delete.py | 43 - skyflow/vault/_detokenize.py | 134 - skyflow/vault/_get.py | 127 - skyflow/vault/_get_by_id.py | 116 - skyflow/vault/_insert.py | 238 -- skyflow/vault/_query.py | 62 - skyflow/vault/_token.py | 44 - skyflow/vault/_update.py | 106 - skyflow/vault/client/__init__.py | 0 skyflow/vault/client/client.py | 102 + skyflow/vault/connection/__init__.py | 2 + 
.../connection/_invoke_connection_request.py | 12 + .../connection/_invoke_connection_response.py | 9 + skyflow/vault/controller/__init__.py | 2 + skyflow/vault/controller/_audit.py | 8 + skyflow/vault/controller/_bin_look_up.py | 7 + skyflow/vault/controller/_connections.py | 40 + skyflow/vault/controller/_vault.py | 286 ++ skyflow/vault/data/__init__.py | 11 + skyflow/vault/data/_delete_request.py | 4 + skyflow/vault/data/_delete_response.py | 11 + skyflow/vault/data/_get_request.py | 22 + skyflow/vault/data/_get_response.py | 10 + skyflow/vault/data/_insert_request.py | 21 + skyflow/vault/data/_insert_response.py | 12 + skyflow/vault/data/_query_request.py | 3 + skyflow/vault/data/_query_response.py | 10 + skyflow/vault/data/_update_request.py | 9 + skyflow/vault/data/_update_response.py | 10 + skyflow/vault/data/_upload_file_request.py | 3 + skyflow/vault/tokens/__init__.py | 4 + skyflow/vault/tokens/_detokenize_request.py | 7 + skyflow/vault/tokens/_detokenize_response.py | 12 + skyflow/vault/tokens/_tokenize_request.py | 3 + skyflow/vault/tokens/_tokenize_response.py | 11 + skyflow/version.py | 1 - tests/__init__.py | 3 - tests/client/__init__.py | 0 tests/client/test_skyflow.py | 332 ++ tests/service_account/__init__.py | 3 - tests/service_account/data/invalidJson.json | 1 - .../data/invalidPrivateKey.json | 6 - tests/service_account/data/noClientID.json | 3 - tests/service_account/data/noKeyID.json | 4 - tests/service_account/data/noPrivateKey.json | 1 - tests/service_account/data/noTokenURI.json | 5 - tests/service_account/invalid_creds.json | 1 + tests/service_account/test__utils.py | 146 + .../test_generate_bearer_token.py | 171 - tests/service_account/test_sa_token_utils.py | 37 - tests/utils/__init__.py | 0 tests/utils/logger/__init__.py | 0 tests/utils/logger/test__log_helpers.py | 86 + tests/utils/logger/test__logger.py | 101 + tests/utils/test__helpers.py | 38 + tests/utils/test__utils.py | 417 +++ tests/vault/__init__.py | 3 - tests/vault/client/__init__.py | 0 tests/vault/client/test__client.py | 105 + tests/vault/controller/__init__.py | 0 tests/vault/controller/test__connection.py | 104 + tests/vault/controller/test__vault.py | 558 +++ tests/vault/test_client_init.py | 42 - tests/vault/test_config.py | 77 - tests/vault/test_delete.py | 235 -- tests/vault/test_detokenize.py | 265 -- tests/vault/test_get.py | 259 -- tests/vault/test_get_by_id.py | 193 - tests/vault/test_insert.py | 649 ---- tests/vault/test_invoke_connection.py | 148 - tests/vault/test_query.py | 175 - tests/vault/test_token_provider_wrapper.py | 62 - tests/vault/test_update.py | 184 - tests/vault/test_url_encoder.py | 115 - 219 files changed, 18809 insertions(+), 6077 deletions(-) create mode 100644 .github/workflows/beta-release.yml create mode 100644 .github/workflows/internal-release.yml create mode 100644 .github/workflows/shared-build-and-deploy.yml create mode 100644 .github/workflows/shared-tests.yml create mode 100644 requirements.txt delete mode 100644 samples/README.md delete mode 100644 samples/delete_sample.py delete mode 100644 samples/detokenize_sample.py delete mode 100644 samples/detokenize_with_continue_on_error_sample.py delete mode 100644 samples/generate_bearer_token_from_creds_sample.py delete mode 100644 samples/get_by_ids_sample.py delete mode 100644 samples/get_sample.py delete mode 100644 samples/get_with_options.py delete mode 100644 samples/insert_sample.py delete mode 100644 samples/insert_upsert_sample.py delete mode 100644 samples/insert_with_continue_on_error_sample.py 
delete mode 100644 samples/invoke_connection_sample.py delete mode 100644 samples/query_sample.py delete mode 100644 samples/sa_token_sample.py create mode 100644 samples/service_account/scoped_token_generation_example.py create mode 100644 samples/service_account/signed_token_generation_example.py create mode 100644 samples/service_account/token_generation_example.py create mode 100644 samples/service_account/token_generation_with_context_example.py delete mode 100644 samples/update_sample.py create mode 100644 samples/vault_api/client_operations.py create mode 100644 samples/vault_api/credentials_options.py create mode 100644 samples/vault_api/delete_records.py create mode 100644 samples/vault_api/detokenize_records.py create mode 100644 samples/vault_api/get_column_values.py create mode 100644 samples/vault_api/get_records.py create mode 100644 samples/vault_api/insert_byot.py create mode 100644 samples/vault_api/insert_records.py create mode 100644 samples/vault_api/invoke_connection.py create mode 100644 samples/vault_api/query_records.py create mode 100644 samples/vault_api/tokenize_records.py create mode 100644 samples/vault_api/update_record.py delete mode 100644 skyflow/_utils.py create mode 100644 skyflow/client/__init__.py create mode 100644 skyflow/client/skyflow.py create mode 100644 skyflow/error/__init__.py create mode 100644 skyflow/error/_skyflow_error.py delete mode 100644 skyflow/errors/__init__.py delete mode 100644 skyflow/errors/_skyflow_errors.py rename tests/service_account/data/empty.json => skyflow/generated/__init__.py (100%) create mode 100644 skyflow/generated/rest/__init__.py create mode 100644 skyflow/generated/rest/api/__init__.py create mode 100644 skyflow/generated/rest/api/audit_api.py create mode 100644 skyflow/generated/rest/api/authentication_api.py create mode 100644 skyflow/generated/rest/api/bin_lookup_api.py create mode 100644 skyflow/generated/rest/api/query_api.py create mode 100644 skyflow/generated/rest/api/records_api.py create mode 100644 skyflow/generated/rest/api/tokens_api.py create mode 100644 skyflow/generated/rest/api_client.py create mode 100644 skyflow/generated/rest/api_response.py create mode 100644 skyflow/generated/rest/configuration.py create mode 100644 skyflow/generated/rest/exceptions.py create mode 100644 skyflow/generated/rest/models/__init__.py create mode 100644 skyflow/generated/rest/models/audit_event_audit_resource_type.py create mode 100644 skyflow/generated/rest/models/audit_event_context.py create mode 100644 skyflow/generated/rest/models/audit_event_data.py create mode 100644 skyflow/generated/rest/models/audit_event_http_info.py create mode 100644 skyflow/generated/rest/models/batch_record_method.py create mode 100644 skyflow/generated/rest/models/context_access_type.py create mode 100644 skyflow/generated/rest/models/context_auth_mode.py create mode 100644 skyflow/generated/rest/models/detokenize_record_response_value_type.py create mode 100644 skyflow/generated/rest/models/googlerpc_status.py create mode 100644 skyflow/generated/rest/models/protobuf_any.py create mode 100644 skyflow/generated/rest/models/query_service_execute_query_body.py create mode 100644 skyflow/generated/rest/models/record_service_batch_operation_body.py create mode 100644 skyflow/generated/rest/models/record_service_bulk_delete_record_body.py create mode 100644 skyflow/generated/rest/models/record_service_insert_record_body.py create mode 100644 skyflow/generated/rest/models/record_service_update_record_body.py create mode 100644 
skyflow/generated/rest/models/redaction_enum_redaction.py create mode 100644 skyflow/generated/rest/models/request_action_type.py create mode 100644 skyflow/generated/rest/models/v1_audit_after_options.py create mode 100644 skyflow/generated/rest/models/v1_audit_event_response.py create mode 100644 skyflow/generated/rest/models/v1_audit_response.py create mode 100644 skyflow/generated/rest/models/v1_audit_response_event.py create mode 100644 skyflow/generated/rest/models/v1_audit_response_event_request.py create mode 100644 skyflow/generated/rest/models/v1_batch_operation_response.py create mode 100644 skyflow/generated/rest/models/v1_batch_record.py create mode 100644 skyflow/generated/rest/models/v1_bin_list_request.py create mode 100644 skyflow/generated/rest/models/v1_bin_list_response.py create mode 100644 skyflow/generated/rest/models/v1_bulk_delete_record_response.py create mode 100644 skyflow/generated/rest/models/v1_bulk_get_record_response.py create mode 100644 skyflow/generated/rest/models/v1_byot.py create mode 100644 skyflow/generated/rest/models/v1_card.py create mode 100644 skyflow/generated/rest/models/v1_delete_file_response.py create mode 100644 skyflow/generated/rest/models/v1_delete_record_response.py create mode 100644 skyflow/generated/rest/models/v1_detokenize_payload.py create mode 100644 skyflow/generated/rest/models/v1_detokenize_record_request.py create mode 100644 skyflow/generated/rest/models/v1_detokenize_record_response.py create mode 100644 skyflow/generated/rest/models/v1_detokenize_response.py create mode 100644 skyflow/generated/rest/models/v1_field_records.py create mode 100644 skyflow/generated/rest/models/v1_file_av_scan_status.py create mode 100644 skyflow/generated/rest/models/v1_get_auth_token_request.py create mode 100644 skyflow/generated/rest/models/v1_get_auth_token_response.py create mode 100644 skyflow/generated/rest/models/v1_get_file_scan_status_response.py create mode 100644 skyflow/generated/rest/models/v1_get_query_response.py create mode 100644 skyflow/generated/rest/models/v1_insert_record_response.py create mode 100644 skyflow/generated/rest/models/v1_member_type.py create mode 100644 skyflow/generated/rest/models/v1_record_meta_properties.py create mode 100644 skyflow/generated/rest/models/v1_tokenize_payload.py create mode 100644 skyflow/generated/rest/models/v1_tokenize_record_request.py create mode 100644 skyflow/generated/rest/models/v1_tokenize_record_response.py create mode 100644 skyflow/generated/rest/models/v1_tokenize_response.py create mode 100644 skyflow/generated/rest/models/v1_update_record_response.py create mode 100644 skyflow/generated/rest/models/v1_vault_field_mapping.py create mode 100644 skyflow/generated/rest/models/v1_vault_schema_config.py create mode 100644 skyflow/generated/rest/py.typed create mode 100644 skyflow/generated/rest/rest.py delete mode 100644 skyflow/service_account/_token.py create mode 100644 skyflow/service_account/_utils.py delete mode 100644 skyflow/service_account/_validity.py create mode 100644 skyflow/service_account/client/__init__.py create mode 100644 skyflow/service_account/client/auth_client.py create mode 100644 skyflow/utils/__init__.py create mode 100644 skyflow/utils/_helpers.py create mode 100644 skyflow/utils/_skyflow_messages.py create mode 100644 skyflow/utils/_utils.py create mode 100644 skyflow/utils/_version.py create mode 100644 skyflow/utils/enums/__init__.py create mode 100644 skyflow/utils/enums/content_types.py create mode 100644 skyflow/utils/enums/env.py create mode 
100644 skyflow/utils/enums/log_level.py create mode 100644 skyflow/utils/enums/redaction_type.py create mode 100644 skyflow/utils/enums/request_method.py create mode 100644 skyflow/utils/enums/token_mode.py create mode 100644 skyflow/utils/logger/__init__.py create mode 100644 skyflow/utils/logger/_log_helpers.py create mode 100644 skyflow/utils/logger/_logger.py create mode 100644 skyflow/utils/validations/__init__.py create mode 100644 skyflow/utils/validations/_validations.py delete mode 100644 skyflow/vault/_client.py delete mode 100644 skyflow/vault/_config.py delete mode 100644 skyflow/vault/_connection.py delete mode 100644 skyflow/vault/_delete.py delete mode 100644 skyflow/vault/_detokenize.py delete mode 100644 skyflow/vault/_get.py delete mode 100644 skyflow/vault/_get_by_id.py delete mode 100644 skyflow/vault/_insert.py delete mode 100644 skyflow/vault/_query.py delete mode 100644 skyflow/vault/_token.py delete mode 100644 skyflow/vault/_update.py create mode 100644 skyflow/vault/client/__init__.py create mode 100644 skyflow/vault/client/client.py create mode 100644 skyflow/vault/connection/__init__.py create mode 100644 skyflow/vault/connection/_invoke_connection_request.py create mode 100644 skyflow/vault/connection/_invoke_connection_response.py create mode 100644 skyflow/vault/controller/__init__.py create mode 100644 skyflow/vault/controller/_audit.py create mode 100644 skyflow/vault/controller/_bin_look_up.py create mode 100644 skyflow/vault/controller/_connections.py create mode 100644 skyflow/vault/controller/_vault.py create mode 100644 skyflow/vault/data/__init__.py create mode 100644 skyflow/vault/data/_delete_request.py create mode 100644 skyflow/vault/data/_delete_response.py create mode 100644 skyflow/vault/data/_get_request.py create mode 100644 skyflow/vault/data/_get_response.py create mode 100644 skyflow/vault/data/_insert_request.py create mode 100644 skyflow/vault/data/_insert_response.py create mode 100644 skyflow/vault/data/_query_request.py create mode 100644 skyflow/vault/data/_query_response.py create mode 100644 skyflow/vault/data/_update_request.py create mode 100644 skyflow/vault/data/_update_response.py create mode 100644 skyflow/vault/data/_upload_file_request.py create mode 100644 skyflow/vault/tokens/__init__.py create mode 100644 skyflow/vault/tokens/_detokenize_request.py create mode 100644 skyflow/vault/tokens/_detokenize_response.py create mode 100644 skyflow/vault/tokens/_tokenize_request.py create mode 100644 skyflow/vault/tokens/_tokenize_response.py delete mode 100644 skyflow/version.py create mode 100644 tests/client/__init__.py create mode 100644 tests/client/test_skyflow.py delete mode 100644 tests/service_account/data/invalidJson.json delete mode 100644 tests/service_account/data/invalidPrivateKey.json delete mode 100644 tests/service_account/data/noClientID.json delete mode 100644 tests/service_account/data/noKeyID.json delete mode 100644 tests/service_account/data/noPrivateKey.json delete mode 100644 tests/service_account/data/noTokenURI.json create mode 100644 tests/service_account/invalid_creds.json create mode 100644 tests/service_account/test__utils.py delete mode 100644 tests/service_account/test_generate_bearer_token.py delete mode 100644 tests/service_account/test_sa_token_utils.py create mode 100644 tests/utils/__init__.py create mode 100644 tests/utils/logger/__init__.py create mode 100644 tests/utils/logger/test__log_helpers.py create mode 100644 tests/utils/logger/test__logger.py create mode 100644 
tests/utils/test__helpers.py create mode 100644 tests/utils/test__utils.py create mode 100644 tests/vault/client/__init__.py create mode 100644 tests/vault/client/test__client.py create mode 100644 tests/vault/controller/__init__.py create mode 100644 tests/vault/controller/test__connection.py create mode 100644 tests/vault/controller/test__vault.py delete mode 100644 tests/vault/test_client_init.py delete mode 100644 tests/vault/test_config.py delete mode 100644 tests/vault/test_delete.py delete mode 100644 tests/vault/test_detokenize.py delete mode 100644 tests/vault/test_get.py delete mode 100644 tests/vault/test_get_by_id.py delete mode 100644 tests/vault/test_insert.py delete mode 100644 tests/vault/test_invoke_connection.py delete mode 100644 tests/vault/test_query.py delete mode 100644 tests/vault/test_token_provider_wrapper.py delete mode 100644 tests/vault/test_update.py delete mode 100644 tests/vault/test_url_encoder.py diff --git a/.github/workflows/beta-release.yml b/.github/workflows/beta-release.yml new file mode 100644 index 00000000..8fab88eb --- /dev/null +++ b/.github/workflows/beta-release.yml @@ -0,0 +1,18 @@ +name: Public Beta Release + +on: + push: + tags: '*.*.*b*' + paths-ignore: + - "setup.py" + - "*.yml" + - "*.md" + - "skyflow/utils/_version.py" + +jobs: + build-and-deploy: + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: main + pypi-token: ${{ secrets.PYPI_PUBLISH_TOKEN }} + is-internal: false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 06aa9a50..fb88ae91 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,46 +16,9 @@ jobs: checkAllCommitMessages: 'true' accessToken: ${{ secrets.PAT_ACTIONS }} error: 'One of your your commit messages is not matching the format with JIRA ID Ex: ( SDK-123 commit message )' - Test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.8' - - - name: create-json - id: create-json - uses: jsdaniell/create-json@1.1.2 - with: - name: "credentials.json" - json: ${{ secrets.CREDENTIALS_FILE_STR }} - - - name: 'Setup .env' - run: | - touch .env - echo VAULT_ID=${{secrets.VAULT_ID}} >> .env - echo VAULT_URL=${{secrets.VAULT_URL}} >> .env - echo DETOKENIZE_TEST_TOKEN=${{secrets.DETOKENIZE_TEST_TOKEN}} >> .env - echo DETOKENIZE_TEST_VALUE=${{secrets.DETOKENIZE_TEST_VALUE}} >> .env - echo CREDENTIALS_FILE_PATH=./credentials.json >> .env - echo CVV_GEN_CONNECTION_URL=${{secrets.CVV_GEN_CONNECTION_URL}} >> .env - echo VISA_CONNECTION_BASIC_AUTH=${{secrets.VISA_CONNECTION_BASIC_AUTH}} >> .env - echo SKYFLOW_ID1=${{secrets.SKYFLOW_ID1}} >> .env - echo SKYFLOW_ID2=${{secrets.SKYFLOW_ID2}} >> .env - echo SKYFLOW_ID3=${{secrets.SKYFLOW_ID3}} >> .env - - name: 'Run Tests' - run: | - python -m pip install --upgrade pip - pip install requests pyjwt datetime aiohttp cryptography python-dotenv coverage - coverage run --source skyflow -m unittest discover - - name: coverage - run: coverage xml -o test-coverage.xml - - name: Codecov - uses: codecov/codecov-action@v2.1.0 - with: - token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} - files: test-coverage.xml - name: codecov-skyflow-python - verbose: true + test: + uses: ./.github/workflows/shared-tests.yml + with: + python-version: '3.8' + secrets: inherit diff --git a/.github/workflows/internal-release.yml b/.github/workflows/internal-release.yml new file mode 100644 index 00000000..d4ad9400 --- /dev/null +++ b/.github/workflows/internal-release.yml @@ -0,0 +1,23 @@ 
+name: Internal Release + +on: + push: + tags-ignore: + - '*.*' + paths-ignore: + - "setup.py" + - "*.yml" + - "*.md" + - "skyflow/utils/_version.py" + - "samples/**" + branches: + - release/* + +jobs: + build-and-deploy: + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: ${{ github.ref_name }} + is-internal: true + secrets: inherit + \ No newline at end of file diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 48472e78..bebe9f3d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,46 +6,7 @@ on: - main jobs: - Test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.8' - - - name: create-json - id: create-json - uses: jsdaniell/create-json@1.1.2 - with: - name: "credentials.json" - json: ${{ secrets.CREDENTIALS_FILE_STR }} - - - name: 'Setup .env' - run: | - touch .env - echo VAULT_ID=${{secrets.VAULT_ID}} >> .env - echo VAULT_URL=${{secrets.VAULT_URL}} >> .env - echo DETOKENIZE_TEST_TOKEN=${{secrets.DETOKENIZE_TEST_TOKEN}} >> .env - echo DETOKENIZE_TEST_VALUE=${{secrets.DETOKENIZE_TEST_VALUE}} >> .env - echo CREDENTIALS_FILE_PATH=./credentials.json >> .env - echo CVV_GEN_CONNECTION_URL=${{secrets.CVV_GEN_CONNECTION_URL}} >> .env - echo VISA_CONNECTION_BASIC_AUTH=${{secrets.VISA_CONNECTION_BASIC_AUTH}} >> .env - echo SKYFLOW_ID1=${{secrets.SKYFLOW_ID1}} >> .env - echo SKYFLOW_ID2=${{secrets.SKYFLOW_ID2}} >> .env - echo SKYFLOW_ID3=${{secrets.SKYFLOW_ID3}} >> .env - - - name: 'Run Tests' - run: | - python -m pip install --upgrade pip - pip install requests pyjwt datetime aiohttp cryptography python-dotenv coverage - coverage run --source skyflow -m unittest discover - - name: coverage - run: coverage xml -o test-coverage.xml - - name: Codecov - uses: codecov/codecov-action@v2.1.0 - with: - token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} - files: test-coverage.xml - name: codecov-skyflow-python - verbose: true \ No newline at end of file + test: + uses: ./.github/workflows/shared-tests.yml + with: + python-version: '3.8' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5fcd5dbe..53894cd6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,47 +7,12 @@ on: - "setup.py" - "*.yml" - "*.md" - - "skyflow/version.py" + - "skyflow/utils/_version.py" jobs: build-and-deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - token: ${{ secrets.PAT_ACTIONS }} - ref: main - fetch-depth: 0 - - uses: actions/setup-python@v2 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine - - - name: Get Previous tag - id: previoustag - uses: WyriHaximus/github-action-get-previous-tag@v1 - with: - fallback: 1.0.0 - - - name: Bump Version - run: | - chmod +x ./ci-scripts/bump_version.sh - ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" - - - name: Commit changes - run: | - git config user.name ${{ github.actor }} - git config user.email ${{ github.actor }}@users.noreply.github.com - git add setup.py - git add skyflow/version.py - git commit -m "[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" - git push origin - - - name: Build and publish - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH_TOKEN }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: main + is-internal: false + 
secrets: inherit diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml new file mode 100644 index 00000000..9d349fbb --- /dev/null +++ b/.github/workflows/shared-build-and-deploy.yml @@ -0,0 +1,84 @@ +name: Shared Build and Deploy + +on: + workflow_call: + inputs: + ref: + description: 'Git reference to use (e.g., main or branch name)' + required: true + type: string + + is-internal: + description: 'Flag for internal release' + required: true + type: boolean + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.ref }} + fetch-depth: 0 + + - uses: actions/setup-python@v2 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + + - name: Get Previous tag + id: previoustag + uses: WyriHaximus/github-action-get-previous-tag@v1 + with: + fallback: 1.0.0 + + - name: Bump Version + run: | + chmod +x ./ci-scripts/bump_version.sh + if ${{ inputs.is-internal }}; then + ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" "$(git rev-parse --short "$GITHUB_SHA")" + else + ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" + fi + + - name: Commit changes + run: | + git config user.name "${{ github.actor }}" + git config user.email "${{ github.actor }}@users.noreply.github.com" + git add setup.py + git add skyflow/utils/_version.py + + if [ "${{ inputs.is-internal }}" = "true" ]; then + VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" + COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" + git commit -m "$COMMIT_MESSAGE" + git push origin ${{ github.ref_name }} -f + else + COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" + git commit -m "$COMMIT_MESSAGE" + git push origin + fi + + - name: Build and Publish Package + if: ${{ !inputs.is-internal }} + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + + - name: Build and Publish to JFrog Artifactory + if: ${{ inputs.is-internal }} + env: + TWINE_USERNAME: ${{ secrets.JFROG_USERNAME }} + TWINE_PASSWORD: ${{ secrets.JFROG_PASSWORD }} + run: | + python setup.py sdist bdist_wheel + twine upload --repository-url https://prekarilabs.jfrog.io/artifactory/api/pypi/skyflow-python/ dist/* + + + \ No newline at end of file diff --git a/.github/workflows/shared-tests.yml b/.github/workflows/shared-tests.yml new file mode 100644 index 00000000..2c54d2b7 --- /dev/null +++ b/.github/workflows/shared-tests.yml @@ -0,0 +1,41 @@ +name: Shared Test Steps + +on: + workflow_call: + inputs: + python-version: + description: 'Python version to use' + required: true + type: string + +jobs: + run-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ inputs.python-version }} + + - name: create-json + id: create-json + uses: jsdaniell/create-json@1.1.2 + with: + name: "credentials.json" + json: ${{ secrets.VALID_SKYFLOW_CREDS_TEST }} + + - name: 'Run Tests' + run: | + pip install -r requirements.txt + python -m coverage run --source=skyflow --omit=skyflow/generated/*,skyflow/utils/validations/*,skyflow/vault/data/*,skyflow/vault/tokens/*,skyflow/vault/connection/*,skyflow/error/*,skyflow/utils/enums/*,skyflow/vault/controller/_audit.py,skyflow/vault/controller/_bin_look_up.py -m unittest discover + + - name: coverage + 
run: coverage xml -o test-coverage.xml + + - name: Codecov + uses: codecov/codecov-action@v2.1.0 + with: + token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} + files: test-coverage.xml + name: codecov-skyflow-python + verbose: true diff --git a/README.md b/README.md index b64a0bc7..bfdab7a3 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,13 @@ # Skyflow-python --- -## Description -This Python SDK is designed to help developers easily implement Skyflow into their python backend. +## Description +This Python SDK is designed to help developers easily implement Skyflow into their Python backend. ## Table of Contents + - [Skyflow-python](#skyflow-python) - [Description](#description) - [Table of Contents](#table-of-contents) @@ -18,6 +19,7 @@ This Python SDK is designed to help developers easily implement Skyflow into the - [Vault APIs](#vault-apis) - [Insert data into the vault](#insert-data-into-the-vault) - [Detokenize](#detokenize) + - [Tokenize](#tokenize) - [Get](#get) - [Get By Id](#get-by-id) - [Redaction Types](#redaction-types) @@ -28,7 +30,6 @@ This Python SDK is designed to help developers easily implement Skyflow into the - [Logging](#logging) - [Reporting a Vulnerability](#reporting-a-vulnerability) - ## Features Authentication with a Skyflow Service Account and generation of a bearer token @@ -41,7 +42,7 @@ Invoking connections to call downstream third party APIs without directly handli ### Requirements -- Python 3.7.0 and above +- Python 3.8.0 and above ### Configuration @@ -57,545 +58,734 @@ The [Service Account](https://github.com/skyflowapi/skyflow-python/tree/main/sky The `generate_bearer_token(filepath)` function takes the credentials file path for token generation. Alternatively, you can send the entire credentials as a string by using `generate_bearer_token_from_creds(credentials)`. -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/main/samples/sa_token_sample.py): +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): ```python -from skyflow.errors import SkyflowError +from skyflow.error import SkyflowError from skyflow.service_account import generate_bearer_token, is_expired # cache token for reuse -bearerToken = '' -tokenType = '' +bearer_token = '' +token_type = '' +def token_provider(): + global bearer_token + global token_type + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('') + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): + +```python +import json +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' def token_provider(): - global bearerToken - global tokenType - - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token('') - return bearerToken, tokenType + global bearer_token + global token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token,
token_type = generate_bearer_token_from_creds(credentials_string) + return bearer_token, token_type try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) except SkyflowError as e: print(e) ``` +## Service Account Scoped Token Generation + +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): + +```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] +} +def token_provider(): + global bearer_token + global token_type -[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/main/samples/generate_bearer_token_from_creds_sample.py): + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): ```python -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token_from_creds, is_expired +import json +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired # cache token for reuse -bearerToken = '' -tokenType = '' +bearer_token = '' +token_type = '' +options = { + 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] +} def token_provider(): - global bearerToken - global tokenType + global bearer_token + global token_type # As an example - credentials = { - "clientID": "", - "clientName": "", - "keyID": "", - "tokenURI": '', - "privateKey": "" + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', } - jsonString = json.dumps(credentials) - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token_from_creds( - credentials=jsonString) - return bearerToken, tokenType + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(credentials_string, options) + return bearer_token, token_type try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) except SkyflowError as e: print(e) ``` -## Vault APIs +## Service Account Token Generation With Context -The [Vault](https://github.com/skyflowapi/skyflow-python/tree/main/skyflow/vault) python module is used to perform operations on the vault such as inserting records, detokenizing tokens, retrieving tokens for a skyflow_id and to invoke a connection. +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): -To use this module, the skyflow client must first be initialized as follows.
+```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': "" +} +def token_provider(): + global bearer_token + global token_type + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): + +```python +import json +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': '' +} +def token_provider(): + global bearer_token + global token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(credentials_string, options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +## Service Account Signed Token Generation + +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): ```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, is_expired # cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': 'CONTEXT_ID', + 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90 # in seconds +} +def token_provider(): + global bearer_token + global token_type -# User defined function to provide access token to the vault apis + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): + +```python +import json +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': 'CONTEXT_ID', + 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90 # in seconds +} def token_provider(): + global bearer_token + global token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if
is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(credentials_string, options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) +``` + +## Vault APIs + +The vault Python module is used to perform operations on the vault, such as inserting records, detokenizing tokens, retrieving tokens for a skyflow_id, and invoking a connection. + +To use this module, the skyflow client must first be initialized as follows. + +```python +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } +credentials_string = json.dumps(skyflow_credentials) + +# Pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: "API_KEY", # API_KEY + # path: "PATH", # path to credentials file + # credentials_string: credentials_string, # credentials as string +} -#Initializing a Skyflow Client instance with a SkyflowConfiguration object -config = Configuration('', '', token_provider) -client = Client(config) +client = ( + Skyflow.builder() + .add_vault_config({ + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL, e.g. https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env, by default it is set to PROD + 'credentials': credentials # individual credentials + }) + .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level, by default it is set to ERROR + .build() +) ``` +Notes: + +- If both Skyflow common credentials and individual credentials at the configuration level are provided, the individual credentials at the configuration level take priority. + All Vault APIs must be invoked using a client instance. ### Insert data into the vault -To insert data into your vault use the `insert(records: dict, options: InsertOptions)` method. The `records` parameter is a dictionary that requires a `records` key and takes an array of records to insert into the vault. The `options` parameter takes a dictionary of optional parameters for the insertion. This includes an option to return tokenized data, upsert records and continue on error. +To insert data into your vault, use the `insert` method. The `InsertRequest` class is used to create an insert request, which contains the values to be inserted in the form of a dictionary of records. Additionally, you can provide options in the insert request, such as returning tokenized data, upserting records, and continuing on error. + +Insert call schema ```python -# Optional, indicates whether you return tokens for inserted data. Defaults to 'true'. -tokens: bool -# Optional, indicates Upsert support in the vault.
-upsert: [UpsertOption] -# Optional, decides whether to continue if error encountered or not -continueOnError: bool +#Initialize Client +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest + +try: + insert_data = [ + {'': ''}, + {'': ''} + ] + + + insert_request = InsertRequest( + table_name = '', + values = insert_data, + ) + + response = skyflow_client.vault('VAULT_ID').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` -Insert call schema +**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/insert_records.py)** + ```python -from skyflow.vault import InsertOptions, UpsertOption -from skyflow.errors import SkyflowError +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest -#Initialize Client try: - # Create an Upsert option. - upsertOption = UpsertOption(table="",column="") - options = InsertOptions(tokens=True, upsert=[upsertOption], continueOnError=False) + insert_data = [ + {'card_number': '4111111111111111'}, + ] - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True # returns tokens + ) + + response = client.vault('').insert(insert_request) print("Response:", response) except SkyflowError as e: print("Error Occurred:", e) + ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_sample.py)** +Skyflow returns tokens for the record you just inserted. ```python -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "cardNumber": "41111111111", - "cvv": "123", - }, + 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', + 'card_number': '5479-4229-4622-1393' } - ] - }, - InsertOptions(True), + ], + errors=[] ) ``` -Skyflow returns tokens for the record you just inserted. 
+**Insert call example with `continue_on_error` option** -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "cardNumber": "f3907186-e7e2-466f-91e5-48e12c2bcbc1", - "cvv": "1989cb56-63da-4482-a2df-1f74cd0dd1a5", - "skyflow_id": "d863633c-8c75-44fc-b2ed-2b58162d1117" - }, - "request_index": 0 - } +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest + +try: + insert_data = [ + {'card_number': '4111111111111111'}, + {'card_numbe': '4111111111111111'}, # Intentional typo card_numbe ] -} + + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True, # returns tokens + continue_on_error = True + ) + + response = client.vault('').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) + ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_with_continue_on_error_sample.py) with `continueOnError` option** +Sample Response ```python -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "card_number": "4111111111111111", - "full_name": "john doe" - } - }, - { - "table": "pii_field", - "fields": { - "card_number": "4242424242424200" - "full_name": "jane doe" - } + 'skyflow_id': '89c125d1-3bec-4360-b701-a032dda16500', + 'request_index': 0, + 'card_number': '5479-4229-4622-1393' } + ], + errors= + [ + { + 'request_index': 1, + 'error': 'Insert failed. Column card_numbe is invalid. Specify a valid column.' + } ] - }, InsertOptions(tokens=True, continueOnError=True) ) + ``` -Sample Response +**Insert call example with `upsert` options** -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "card_number": "f37186-e7e2-466f-91e5-48e2bcbc1", - "full_name": "1989cb56-63a-4482-adf-1f74cd1a5", - "skyflow_id": "3daf1a7f-bc7f-4fc9-8c56-a6e4e93231e6" - }, - "request_index": 0 - } - ], - "errors": [ - { - "error": { - "code": 404, - "description": "Object Name pii_field was not found for Vault - requestId : af4aad11-f276-474d-b626-c75c8b35d49e", - "request_index": 1 - } - } - ] -} +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest +try: + insert_data = [ + {"name": 'sample name'}, + ] + + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True, # returns tokens + upsert = "name" # unique column name + ) + + response = client.vault('').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_upsert_sample.py) with `upsert` options** +Skyflow returns tokens, with `upsert` support, for the record you just inserted. ```python -upsertOption = UpsertOption(table="cards",column="cardNumber") -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "cardNumber": "41111111111", - "cvv": "123", - }, + 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', + 'name': '3f27b3d7-6bf0-432a-acf9-789c0470e2da' } - ] - }, - InsertOptions(tokens=True,upsert=[upsertOption]), + ], + errors=[] ) ``` -Skyflow returns tokens, with `upsert` support, for the record you just inserted. 
- -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "cardNumber": "f3907186-e7e2-466f-91e5-48e12c2bcbc1", - "cvv": "1989cb56-63da-4482-a2df-1f74cd0dd1a5", - "skyflow_id": "60b32788-12ec-4dd7-9da5-0146c3afbe11" - }, - "request_index": 0 - } - ] -} -``` - ### Detokenize -To retrieve tokens from your vault, you can use the `Detokenize(records: dict, options: DetokenizeOptions)` method.The records parameter takes a dictionary that contains the `records` key that takes an array of records to return. The options parameter is a `DetokenizeOptions` object that provides further options, including `continueOnError` operation, for your detokenize call, as shown below: +To retrieve tokens from your vault, you can use the `detokenize` method. The `DetokenizeRequest` class requires a list of detokenization data to be provided as input. Additionally, the redaction type and continue on error are optional parameters. ```python -{ - "records":[ - { - "token": str , # Token for the record to fetch - "redaction": Skyflow.RedactionType # Optional. Redaction to apply for retrieved data. E.g. RedactionType.MASKED - } - ] -} +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +try: + detokenize_data = ['', '', ''] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = False, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional + ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) except SkyflowError as e: + print('Error Occurred:', e) + ``` + Notes: -- `redaction` defaults to [RedactionType.PLAIN_TEXT](#redaction-types). -- `continueOnError` in DetokenizeOptions will default to `True`. -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/detokenize_sample.py) of a detokenize call: +- `redaction_type` defaults to `RedactionType.PLAIN_TEXT`. +- `continue_on_error` default value is `False`.
+ +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/detokenize_records.py) of a detokenize call: ```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + try: - client.detokenize( - { - "records": [ - { - "token": "45012507-f72b-4f5c-9bf9-86b133bae719" - }, - { - "token": '1r434532-6f76-4319-bdd3-96281e051051', - "redaction": Skyflow.RedactionType.MASKED - }, - { - "token": "invalid-token" - } - ] - } + detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-3840'] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = False, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "token": "131e70dc-6f76-4319-bdd3-96281e051051", - "value": "1990-01-01" - }, - { - "token": "1r434532-6f76-4319-bdd3-96281e051051", - "value": "xxxxxxer", - } - ], - "errors": [ - { - "token": "invalid-token", - "error": { - "code": 404, - "description": "Tokens not found for invalid-token" - } - } - ] -} +DetokenizeResponse( + detokenized_fields=[ + {'token': '9738-1683-0486-1480', 'value': '4111111111111115', 'type': 'STRING'}, + {'token': '6184-6357-8409-6668', 'value': '4111111111111119', 'type': 'STRING'}, + {'token': '4914-9088-2814-3840', 'value': '4111111111111118', 'type': 'STRING'} + ], + errors=[] +) ``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/detokenize_with_continue_on_error_sample.py) of a detokenize call with continueOnError: +An example of a detokenize call with continue_on_error: ```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + try: - client.detokenize( - { - "records": [ - { - "token": "45012507-f72b-4f5c-9bf9-86b133bae719" - }, - { - "token": '1r434532-6f76-4319-bdd3-96281e051051', - "redaction": Skyflow.RedactionType.MASKED - } - ] - }, DetokenizeOptions(continueOnError=False) + detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-384'] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = True, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "token": "131e70dc-6f76-4319-bdd3-96281e051051", - "value": "1990-01-01" - }, - { - "token": "1r434532-6f76-4319-bdd3-96281e051051", - "value": "xxxxxxer", - } - ] -} -``` - -### Get +DetokenizeResponse( + detokenized_fields=[ + { + 'token': '9738-1683-0486-1480', + 'value': '4111111111111115', + 'type': 'STRING' + }, + { + 'token': '6184-6357-8409-6668', + 'value': '4111111111111119', + 'type': 'STRING' + } + ], + errors=[ + { + 'token': '4914-9088-2814-384', + 'error': 'Token Not Found' + } + ] +) -To retrieve data using Skyflow IDs or unique column values, use the `get(records: dict,options: GetOptions)` method. 
The `records` parameter takes a Dictionary that contains either an array of Skyflow IDs or a unique column name and values. The second parameter options is a GetOptions object that retrieves tokens of Skyflow IDs. 
Note:

 - You can use either Skyflow IDs or `unique` values to retrieve records. You can't use both at the same time.
 - GetOptions parameter applicable only for retrieving tokens using Skyflow ID.
 - You can't pass GetOptions along with the redaction type.
 - `tokens` defaults to false.

```python
{
 'records': [
 {
 'columnName': str, # Name of the unique column.
 'columnValues': [str], # List of unique column values.
 'table': str, # Name of table holding the data.
 'redaction': Skyflow.RedactionType, # Redaction applied to retrieved data.
 }
 ]
}
 or
{
 'records': [
 {
 'ids': [str], # List of Skyflow IDs.
 'table': str, # Name of table holding the data.
 'redaction': Skyflow.RedactionType, # Redaction applied to retrieved data.
 }
 ]
}

```
Sample usage

The following snippet shows how to use the `get()` method. For details, see [get_sample.py](https://github.com/skyflowapi/skyflow-python/blob/main/samples/get_sample.py),

```python
from skyflow.vault import RedactionType

skyflowIDs = ['f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9']
record = {'ids': skyflowIDs, 'table': 'cards', 'redaction':RedactionType.PLAIN_TEXT}
recordsWithUniqueColumn =
 {
 'table': 'test_table',
 'columnName': 'card_number',
 'columnValues': ['123456'],
 'redaction': RedactionType.PLAIN_TEXT
 }

invalidID = ['invalid skyflow ID']
badRecord = {'ids': invalidID, 'table': 'cards', 'redaction': RedactionType.PLAIN_TEXT}

records = {'records': [record, badRecord]}

try:
 client.get(records)
except SkyflowError as e:
 if e.data:
 print(e.data)
 else:
 print(e)
```

Sample response:

```python
{
 'records': [
 {
 'fields': {
 'card_number': '4111111111111111',
 'cvv': '127',
 'expiry_date': '11/35',
 'fullname': 'monica',
 'skyflow_id': 'f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9'
 },
 'table': 'cards'
 },
 {
 'fields': {
 'card_number': '123456',
 'cvv': '317',
 'expiry_date': '10/23',
 'fullname': 'sam',
 'skyflow_id': 'da26de53-95d5-4bdb-99db-8d8c66a35ff9'
 },
 'table': 'cards'
 }
 ],
 'errors': [
 {
 'error': {
 'code': '404',
 'description': 'No Records Found'
 },
 'skyflow_ids': ['invalid skyflow id']
 }
 ]
}
```

The following snippet shows how to use the `get()` method with GetOptions.

```python
from skyflow.vault import GetOptions

{
 'records': [
 {
 'ids': ['56513264-fc45-41fa-9cb0-d1ad3602bc49','da26de53-95d5-4bdb-99db-8d8c66a35ff9'],
 'table': 'cards',
 }
 ]
}

try:
 client.get(records, GetOptions(True))
except SkyflowError as e:
 if e.data:
 print(e.data)
 else:
 print(e)
```

Sample response:

```python
{
 'records': [
 {
 'fields': {
 'card_number': '4555-5176-5936-1930',
 'cvv': '6ad5f708-2061-453e-9491-618a1f29a688',
 'skyflow_id': '56513264-fc45-41fa-9cb0-d1ad3602bc49'
 },
 'table': 'cards'
 },
 {
 'fields': {
 'card_number': '8882-7418-2776-6660',
 'cvv': '25260679-e339-4b33-a5b0-c8b08df77af7',
 'skyflow_id': 'da26de53-95d5-4bdb-99db-8d8c66a35ff9'
 },
 'table': 'cards'
 }
 ],
 'errors': []
}
```

### Get By Id

For retrieving using SkyflowID's, use the get_by_id(records: dict) method. The records parameter takes a Dictionary that contains records to be fetched as shown below:

```python
{
 "records": [
 {
 "ids": [str], # List of SkyflowID's of the records to be fetched
 "table": str, # name of table holding the above skyflow_id's
 "redaction": Skyflow.RedactionType, # redaction to be applied to retrieved data
 }
 ]
}
```
+### Tokenize +To tokenize data, use the `tokenize` method. The `TokenizeRequest` class is utilized to create a tokenize request. In this request, you specify the `values` parameter, which is a list of dictionaries. Each dictionary contains two keys: `value` and `column_group`. +```python +from skyflow.vault.tokens import TokenizeRequest +tokenize_request = TokenizeRequest( + values = [{ + 'value': '', + 'column_group': '' + }] +) +``` + +Sample usage + +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/tokenize_records.py) of a tokenize call: + +```python +from skyflow.error import SkyflowError +from skyflow.vault.tokens import TokenizeRequest + +try: + tokenize_request = TokenizeRequest( + values = [{ + "value": '4111111111111111', + "column_group": "card_number_cg" + }] + ) + + response = client.vault('').tokenize(tokenize_request) + print(response) +except SkyflowError as e: + print('Error Occurred: ', e) +``` + +Sample response: + +```python +TokenizeResponse( + tokenized_fields=[ + { + 'token': '5479-4229-4622-1393' + } + ] +) + +``` +### Get + +To retrieve data using Skyflow IDs or unique column values, use the `get` method. The `GetRequest` class is used to create a get request, where you specify parameters such as the table name, redaction type, Skyflow IDs, column names, column values, and return tokens. If Skyflow IDs are provided, column names and column values cannot be used. Similarly, if column names or column values are provided, Skyflow IDs cannot be used. + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest + +GetRequest( + table = '', + ids = ['<SKYFLOW_ID1>', '<SKYFLOW_ID2>'], + return_tokens = True, + redaction_type = RedactionType.PLAIN_TEXT +) + +# or + +GetRequest( + table = '', + column_name = '', + column_values = ['<COLUMN_VALUE1>', '<COLUMN_VALUE2>'], + redaction_type = RedactionType.PLAIN_TEXT +) +``` + +Sample usage + +### Get By Column Name and Column Values + +The following snippet shows how to use the `get` method using column names and column values. For details, see [get_column_values.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_column_values.py), + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest + +try: + column_values = [ + '123456' + ] + + get_request = GetRequest( + table = 'table1', + column_name = 'card_number', # It must be configured as unique in the schema. + column_values = column_values, + redaction_type = RedactionType.PLAIN_TEXT + ) + + response = skyflow_client.vault('').get(get_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) + +``` + +Sample response: + +```python +GetResponse( + data=[ + { + 'card_number': '123456', + 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968' + } + ], + errors=[] +) + +``` + +### Get By Skyflow Ids + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest + +GetRequest( + table = '', + ids = ['<SKYFLOW_ID1>', '<SKYFLOW_ID2>'], + return_tokens = True, + redaction_type = RedactionType.PLAIN_TEXT +) +``` #### Redaction Types + There are 4 accepted values in Skyflow.RedactionTypes: - `PLAIN_TEXT` - `MASKED` - `REDACTED` - `DEFAULT` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/get_by_ids_sample.py) of get_by_id call: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_records.py) of get by skyflow ids call: ```python -from skyflow.vault import RedactionType +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest -skyflowIDs = [ - "f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9", - "da26de53-95d5-4bdb-99db-8d8c66a35ff9" -] -record = {"ids": skyflowIDs, "table": "cards", "redaction": RedactionType.PLAIN_TEXT} +try: + get_request = GetRequest( + table = 'table1', + ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], + redaction_type = RedactionType.PLAIN_TEXT + ) -invalidID = ["invalid skyflow ID"] -badRecord = {"ids": invalidID, "table": "cards", "redaction": RedactionType.PLAIN_TEXT} + response = client.vault('').get(get_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) +``` -records = {"records": [record, badRecord]} + +Sample response: + +```python +GetResponse( + data=[ + { + 'card_number': '4555555555555553', + 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + }, + { + 'card_number': '4555555555555559', + 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + } + ], + errors=[] +) + +``` + +The following snippet shows how to use the `get()` method with return_tokens true.
+ +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import GetRequest try: - client.get_by_id(records) + get_request = GetRequest( + table = 'table1', + ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], + return_tokens = True + ) + + response = client.vault('').get(get_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "fields": { - "card_number": "4111111111111111", - "cvv": "127", - "expiry_date": "11/35", - "fullname": "myname", - "skyflow_id": "f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9" - }, - "table": "cards" - }, - { - "fields": { - "card_number": "4111111111111111", - "cvv": "317", - "expiry_date": "10/23", - "fullname": "sam", - "skyflow_id": "da26de53-95d5-4bdb-99db-8d8c66a35ff9" - }, - "table": "cards" - } - ], - "errors": [ - { - "error": { - "code": "404", - "description": "No Records Found" - }, - "skyflow_ids": ["invalid skyflow id"] - } - ] -} +GetResponse( + data=[ + { + 'card_number': '3562-0140-8820-7499', + 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + }, + { + 'card_number': '3998-2139-0328-0697', + 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + } + ], + errors=[] +) ``` -`Note:` While using detokenize and get_by_id methods, there is a possibility that some or all of the tokens might be invalid. In such cases, the data from response consists of both errors and detokenized records. In the SDK, this will raise a SkyflowError Exception and you can retrieve the data from this Exception object as shown above. - ### Update -To update data in your vault, use the `update(records: dict, options: UpdateOptions)` method. The `records` parameter takes a Dictionary that contains records to fetch. If `UpdateTokens` is `True`, Skyflow returns tokens for the record you just updated. If `UpdateOptions` is `False`, Skyflow returns IDs for the record you updated. +To update data in your vault, use the `update` method. The `UpdateRequest` class is used to create an update request, where you specify parameters such as the table name, data (as a dictionary), tokens, return_tokens, and token_strict. If `return_tokens` is set to True, Skyflow returns tokens for the updated records. If `return_tokens` is set to False, Skyflow returns IDs for the updated records. ```python -# Optional, indicates whether to return all fields for updated data. Defaults to 'true'. -options: UpdateOptions -``` +from skyflow.error import SkyflowError +from skyflow.vault.data import UpdateRequest -```python -{ - 'records': [ - { - 'id': str, # Skyflow ID of the record to be updated. - 'table': str, # Name of table holding the skyflowID. - 'fields': { - str: str # Name of the column and value to update. - } - } - ] -} +try: + update_data = { + 'skyflow_id': '', + '': '' + } + + update_request = UpdateRequest( + table='TABLE_NAME', + data=update_data + ) + + response = skyflow_client.vault('VAULT_ID').update(update_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` + Sample usage -The following snippet shows how to use the `update()` method. For details, see [update_sample.py](https://github.com/skyflowapi/skyflow-python/blob/main/samples/update_sample.py), +The following snippet shows how to use the `update()` method. 
For details, see [update_record.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/update_record.py), ```python -records = { - 'records': [ - { - 'id': '56513264-fc45-41fa-9cb0-d1ad3602bc49', - 'table': 'cards', - 'fields': { - 'card_number': '45678910234' - } - } - ] - } +from skyflow.error import SkyflowError +from skyflow.vault.data import UpdateRequest + try: - client.update(records, UpdateOptions(True)) + update_data = { + 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', + 'card_number': '4111111111117777' + } + + update_request = UpdateRequest( + table = 'table1', + data = update_data + ) + + response = skyflow_client.vault('').update(update_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) - else: - print(e) + print('Error Occurred:', e) ``` Sample response -`UpdateOptions` set to `True` +`return_tokens` set to `True` ```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49', - 'fields':{ - 'card_number':'0051-6502-5704-9879' - } - } - ], - 'errors':[] -} -``` - -`UpdateOptions` set to `False` +UpdateResponse( + updated_field={ + 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', + 'card_number': '4131-1751-0217-8491' + }, + errors=[] +) -```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49' - } - ], - 'errors':[] -} ``` -Sample Error +`return_tokens` set to `False` ```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49' - } - ], - 'errors':[ - { - 'error':{ - 'code':404, - 'description':'Token for skyflowID doesn"t exist in vault - Request ID: a8def196-9569-9cb7-9974-f899f9e4bd0a' - } - } - ] -} +UpdateResponse( + updated_field={'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315'}, + errors=[] +) + ``` ### Delete -For deleting using SkyflowID's, use the delete(records: dict) method. The records parameter takes a Dictionary that contains records to be deleted as shown below: +To delete records using Skyflow IDs, use the `delete` method. 
The `DeleteRequest` class accepts a list of Skyflow IDs that you want to delete, as shown below: ```python -{ - "records": [ - { - "id": str, # SkyflowID of the records to be deleted - "table": str, # name of table holding the above skyflow_id - }, - { - "id": str, # SkyflowID of the records to be deleted - "table": str, # name of table holding the above skyflow_id - } - ] -} +from skyflow.error import SkyflowError +from skyflow.vault.data import DeleteRequest + +primary_delete_ids = [ + 'SKYFLOW_ID1', + 'SKYFLOW_ID2', + 'SKYFLOW_ID3', +] + +delete_request = DeleteRequest( + table = '', + ids = primary_delete_ids +) ``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/delete_sample.py) of delete call: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/delete_records.py) of delete call: ```python +from skyflow.error import SkyflowError +from skyflow.vault.data import DeleteRequest -skyflowID = "b3d52e6d-1d6c-4750-ba28-aa30d04dbf01" -record = {"id": skyflowID, "table": "cards"} - -invalidID = "invalid skyflow ID" -badRecord = {"id": invalidID, "table": "cards"} +try: + delete_ids = [ + '77e093f8-3ace-4295-8683-bb6745d6178e', + 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' + ] -records = {"records": [record, badRecord]} + delete_request = DeleteRequest( + table='table1', + ids=delete_ids + ) -try: - client.delete(records) + response = client.vault('').delete(delete_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records":[ - { - "skyflow_id":"b3d52e6d-1d6c-4750-ba28-aa30d04dbf01", - "deleted":true - } - ], - "errors":[ - { - "id":"invalid skyflow ID", - "error":{ - "code":404, - "description":"No Records Found - request id: 239d462c-aa13-9f9d-a349-165b3dd11217" - } - } - ] -} +DeleteResponse( + deleted_ids=[ + '77e093f8-3ace-4295-8683-bb6745d6178e', + 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' + ], + errors=[] +) + ``` ### Invoke Connection -Using Skyflow Connection, end-user applications can integrate checkout/card issuance flow with their apps/systems. To invoke connection, use the invoke_connection(config: Skyflow.ConnectionConfig) method of the Skyflow client. +Using Skyflow Connection, end-user applications can integrate checkout/card issuance flow with their apps/systems. To invoke connection, use the `invoke` method of the Skyflow client. 
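+The `InvokeConnectionRequest` class, shown below, carries the HTTP method along with the optional request body, headers, path parameters, and query parameters for the call.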
```python -config = ConnectionConfig( - connectionURL: str, # connection url received when creating a skyflow connection integration - methodName: Skyflow.RequestMethod, - pathParams: dict, # optional - queryParams: dict, # optional - requestHeader: dict, # optional - requestBody: dict, # optional +from skyflow.error import SkyflowError +from skyflow.vault.connection import InvokeConnectionRequest + +body = { + 'KEY1': 'VALUE1', + 'KEY2': 'VALUE2' +} +headers = { + 'KEY1': 'VALUE1' +} +path_params = { + 'KEY1': 'VALUE1' +} +query_params = { + 'KEY1': 'VALUE1' +} + +invoke_connection_request = InvokeConnectionRequest( + method = Method.POST, + body = body, + headers = headers, # optional + path_params = path_params, # optional + query_params = query_params # optional ) -client.invokeConnection(config) ``` `methodName` supports the following methods: @@ -852,77 +1041,88 @@ client.invokeConnection(config) - PATCH - DELETE -**pathParams, queryParams, requestHeader, requestBody** are the JSON objects represented as dictionaries that will be sent through the connection integration url. +**path_params, query_params, request_header, request_body** are the JSON objects represented as dictionaries that will be sent through the connection integration url. -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/invoke_connection_sample.py) of invoke_connection: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/invoke_connection.py) of invoke_connection: ```python -from skyflow.vault import ConnectionConfig, Configuration, RequestMethod +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow.utils.enums import Method +from skyflow.error import SkyflowError +from skyflow.vault.connection import InvokeConnectionRequest + +credentials = { + 'path': '/path/to/credentials.json', +} -bearerToken = '' -def token_provider(): - global bearerToken - if !(is_expired(bearerToken)): - return bearerToken - bearerToken, _ = generate_bearer_token('') - return bearerToken +client = ( + Skyflow.builder() + .add_connection_config({ + 'connection_id': '', + 'connection_url': '', + 'credentials': credentials + }) + .set_log_level(LogLevel.OFF) + .build() +) -try: - config = Configuration('', '', token_provider) - connectionConfig = ConnectionConfig('', RequestMethod.POST, - requestHeader={ - 'Content-Type': 'application/json', - 'Authorization': '' - }, - requestBody= # For third party integration - { - "expirationDate": { - "mm": "12", - "yy": "22" - } +invoke_connection_request = InvokeConnectionRequest( + method=Method.POST, + body={ + 'card_number': '4337-1696-5866-0865', + 'ssn': '524-41-4248' }, - pathParams={'cardID': ''}) # param as in the example - client = Client(config) + headers = { + 'Content-Type': 'application/json' + } +) + +response = client.connection('').invoke(invoke_connection_request) + +print(response) - response = client.invoke_connection(connectionConfig) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) ``` Sample response: ```python -{ - "receivedTimestamp": "2021-11-05 13:43:12.534", - "processingTimeinMs": 12, - "resource": { - "cvv2": "558" +ConnectionResponse( + { + 'card_number': '4337-1696-5866-0865', + 'ssn': '524-41-4248', + 'request_id': '84796a11-0b7d-4cb0-a348-cf9fefb5886f,84796a11-0b7d-4cb0-a348-cf9fefb5886f' } -} +) + ``` ### Query -To retrieve data with SQL queries, use the `query(queryInput, options)` method. 
`queryInput` is an object that takes the `query` parameter as follows: +To retrieve data with SQL queries, use the `query` method. `QueryRequest` is class that takes the `query` parameter as follows: ```python -{ - query: str # SQL query statement -} +from skyflow.vault.data import QueryRequest + +query_request = QueryRequest( + query= '' +) ``` See [Query your data](https://docs.skyflow.com/query-data/) and [Execute Query](https://docs.skyflow.com/record/#QueryService_ExecuteQuery) for guidelines and restrictions on supported SQL statements, operators, and keywords. -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/query_sample.py) of Query call: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/query_records.py) of Query call: ```python -queryInput = { - query: "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" -} +from skyflow.error import SkyflowError +from skyflow.vault.data import QueryRequest + +query_request = QueryRequest( + query = "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" +) try: - client.query(queryInput) + skyflow_client.vault('').query(query_request) except SkyflowError as e: if e.data: print(e.data) @@ -933,37 +1133,58 @@ except SkyflowError as e: Sample Response ```python -{ - "records": [ - { - "fields": { - "card_number": "XXXXXXXXXXXX1111", - "card_pin": "*REDACTED*", - "cvv": "", - "expiration_date": "*REDACTED*", - "expiration_month": "*REDACTED*", - "expiration_year": "*REDACTED*", - "name": "a***te", - "skyflow_id": "3ea3861-x107-40w8-la98-106sp08ea83f", - "ssn": "XXX-XX-6789", - "zip_code": None - }, - "tokens": None - } - ] -} +QueryResponse( + fields=[ + { + 'card_number': 'XXXXXXXXXXXX1112', + 'name': 'S***ar', + 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968', + 'tokenized_data': {} + } + ], + errors=[] +) ``` ## Logging -The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(logLevel)` as shown below: +The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below: ```python -import logging -from skyflow import set_log_level, LogLevel +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow import Env + +# To generate Bearer Token from credentials string. 
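+# The fields below mirror the contents of a service account credentials.json file.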
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } +credentials_string = json.dumps(skyflow_credentials) + +# Pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: "API_KEY", # API_KEY + # path: "PATH", # path to credentials file + # credentials_string: credentials_string, # credentials as string +} -logging.basicConfig() # You can set the basic config here -set_log_level(LogLevel.INFO) # sets the skyflow SDK log level to INFO +client = ( + Skyflow.builder() + .add_vault_config({ + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials # individual credentials + }) + .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) ``` Current the following 5 log levels are supported: diff --git a/ci-scripts/bump_version.sh b/ci-scripts/bump_version.sh index b0a57a9e..a770e905 100755 --- a/ci-scripts/bump_version.sh +++ b/ci-scripts/bump_version.sh @@ -1,22 +1,24 @@ Version=$1 SEMVER=$Version -if [ -z $2 ] +if [ -z "$2" ] then - echo "Bumping package version to $1" + echo "Bumping package version to $1" - sed -E "s/current_version = .+/current_version = \'$SEMVER\'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile - sed -E "s/SDK_VERSION = .+/SDK_VERSION = \'$SEMVER\'/g" skyflow/version.py > tempfile && cat tempfile > skyflow/version.py && rm -f tempfile - - echo -------------------------- - echo "Done, Package now at $1" + sed -E "s/current_version = .+/current_version = '$SEMVER'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile + sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$SEMVER'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile + echo -------------------------- + echo "Done, Package now at $1" else - echo "Bumping package version to $1-dev.$2" + # Use dev version with commit SHA + DEV_VERSION="${SEMVER}.dev0+$(echo $2 | tr -dc '0-9a-f')" + + echo "Bumping package version to $DEV_VERSION" - sed -E "s/current_version = .+/current_version = \'$SEMVER-dev.$2\'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile - sed -E "s/SDK_VERSION = .+/SDK_VERSION = \'$SEMVER-dev.$2\'/g" skyflow/version.py > tempfile && cat tempfile > skyflow/version.py && rm -f tempfile + sed -E "s/current_version = .+/current_version = '$DEV_VERSION'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile + sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$DEV_VERSION'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile - echo -------------------------- - echo "Done, Package now at $1-dev.$2" + echo -------------------------- + echo "Done, Package now at $DEV_VERSION" fi diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..46a85940 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,11 @@ +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.25.3, < 2.1.0 +pydantic >= 2 +typing-extensions >= 4.7.1 +DateTime~=5.5 +PyJWT~=2.9.0 +requests~=2.32.3 +coverage +cryptography +python-dotenv~=1.0.1 \ No newline at end of file diff --git a/samples/README.md b/samples/README.md 
deleted file mode 100644 index 64863dbf..00000000 --- a/samples/README.md +++ /dev/null @@ -1,211 +0,0 @@ -# Python SDK samples - -Test the SDK by adding `VAULT-ID`, `VAULT-URL`, and `SERVICE-ACCOUNT` details in -the required places for each sample. - -## Prerequisites -- A Skyflow account. If you don't have one, register for one on the - [Try Skyflow](https://skyflow.com/try-skyflow) page. -- Python 3.7.0 or higher. - -## Prepare - -### Install the Python SDK - -```bash -pip install skyflow -``` - -### Create the vault - -1. In a browser, sign in to Skyflow Studio. -2. Create a vault by clicking **Create Vault** > **Start With a Template** > - **Quickstart vault**. -3. Once the vault is ready, click the gear icon and select **Edit Vault Details**. -4. Note your **Vault URL** and **Vault ID** values, then click **Cancel**. - You'll need these later. - -### Create a service account - -1. In the side navigation click, **IAM** > **Service Accounts** > **New Service Account**. -2. For **Name**, enter "SDK Sample". For **Roles**, choose **Vault Editor**. -3. Click **Create**. Your browser downloads a **credentials.json** file. Keep - this file secure, as You'll need it for each of the samples. - -## The samples -### [Get data](./get_sample.py) - -To retrieve data using Skyflow IDs or unique column values, use the `get(records: dict)` method. The `records` parameter takes a Dictionary that contains either an array of Skyflow IDs or a unique column name and values. - -Note: You can use either Skyflow IDs or `unique` values to retrieve records. You can't use both at the same time. -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | One of the four Redaction Types. | -| `` | Skyflow Id of the record to be fetched. | -| `` | Unique column name to fetch the data. | -| `` | Column value of the corresponding column. | - -#### Run the sample - -```bash -python3 get_sample.py -``` -### [Get data by ID](./get_by_ids_sample.py) - -Get data using Skyflow IDs for the desired records. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | Skyflow ID of the first record. | -| `` | Skyflow ID of the second record. | -| `` | Skyflow ID of the third record. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to get data from. | - -#### Run the sample - -```bash -python3 get_by_ids_sample.py -``` - - -### [Update data](./update_sample.py) - -Update data in the vault. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | Skyflow Id of the record to be updated. | -| `` | Name of the column to update data. | -| `` | Valid value to update into the corresponding column. 
| - -#### Run the sample - -```bash -python3 update_sample.py -``` - -### [Insert data](./insert_sample.py) - -Insert data in the vault. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | Name of the column to insert data into. | -| `` | Valid value to insert into the corresponding column. | - -#### Run the sample - -```bash -python3 insert_sample.py -``` - -### [Detokenize data](./detokenize_sample.py) - -Detokenize a data token from the vault. Make sure the specified token is for -data that exists in the vault. If you need a valid token, use -[insert_sample.py](insert_sample.py) to insert the data, then use this data's -token for detokenization. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the column to insert data into. | -| `` | Token for the data you want to detokenize. | - -#### Run the sample - -```bash -python3 detokenize_sample.py -``` - -### [Invoke a connection](./invoke_connection_sample.py) - -Skyflow Connections is a gateway service that uses Skyflow's underlying -tokenization capabilities to securely connect to first-party and third-party -services. This way, your infrastructure is never directly exposed to sensitive -data, and you offload security and compliance requirements to Skyflow. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | URL of your connection. | -| `` | Access token for your connection. | -| `requestBody` | Your request body content. | -| `pathParams` | Your path parameters. | - -#### Run the sample - -```bash -python3 invoke_connection_sample.py -``` - -### [Service account token generation](./sa_token_sample.py) - -Generates SA Token using path of credentials file. - -#### Configure - -Replace `` with the relative path to your service account credentials file. - -#### Run the sample - -```bash -python3 sa_token_sample.py -``` - -### [Generate Bearer Token](./generate_bearer_token_from_creds_sample.py) - -Generates SA Token using json content of credentials file. - -#### Configure - -Replace `credentials` with the content of service account credentials file. - -#### Run the sample - -```bash -python3 generate_bearer_token_from_creds_sample.py -``` diff --git a/samples/delete_sample.py b/samples/delete_sample.py deleted file mode 100644 index 85137e6f..00000000 --- a/samples/delete_sample.py +++ /dev/null @@ -1,40 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration,DeleteOptions - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - options = DeleteOptions(False) - - data = {"records": [ - { - "id": "", - "table": "", - }, - { - "id": "", - "table": "", - } - ]} - - response = client.delete(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/detokenize_sample.py b/samples/detokenize_sample.py deleted file mode 100644 index 4cfae8c1..00000000 --- a/samples/detokenize_sample.py +++ /dev/null @@ -1,41 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration -from skyflow.vault import RedactionType - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = { - "records": [ - { - "token": '', - "redaction": RedactionType.MASKED - }, - { - "token": '', - } - ] - } - response = client.detokenize(data) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/detokenize_with_continue_on_error_sample.py b/samples/detokenize_with_continue_on_error_sample.py deleted file mode 100644 index 18927fc1..00000000 --- a/samples/detokenize_with_continue_on_error_sample.py +++ /dev/null @@ -1,54 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration -from skyflow.vault import RedactionType -from skyflow.vault._config import DetokenizeOptions - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - -def detokenize(client, data): - try: - response = client.detokenize(data, DetokenizeOptions(continueOnError=True)) - print('Response:', response) - except SkyflowError as e: - print('Error Occurred:', e) - -def bulkDetokenize(client, data): - try: - response = client.detokenize(data, DetokenizeOptions(continueOnError=False)) - print('Response:', response) - except SkyflowError as e: - print('Error Occurred:', e) - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = { - "records": [ - { - "token": '', - "redaction": RedactionType.MASKED - }, - { - "token": '', - } - ] - } - - detokenize(client, data) - bulkDetokenize(client, data) -except Exception as e: - print('Something went wrong:', e) diff --git a/samples/generate_bearer_token_from_creds_sample.py b/samples/generate_bearer_token_from_creds_sample.py deleted file mode 100644 index 7c36c47e..00000000 --- a/samples/generate_bearer_token_from_creds_sample.py +++ /dev/null @@ -1,45 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json - -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token_from_creds, is_expired - -''' - This sample demonstrates the usage of generate_bearer_token_from_creds - - - Use json.dumps(credentialsString) to make it a valid json string - - Use generate_bearer_token_from_creds(jsonString) to get the Bearer Token -''' - -# cache token for reuse -bearerToken = '' -tokenType = '' - - -def token_provider(): - global bearerToken - global tokenType - # As an example - credentials = { - "clientID": "", - "clientName": "", - "keyID": "", - "tokenURI": '', - "privateKey": "" - } - jsonString = json.dumps(credentials) - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token_from_creds( - credentials=jsonString) - - return bearerToken - - -try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) -except SkyflowError as e: - print(e) diff --git a/samples/get_by_ids_sample.py b/samples/get_by_ids_sample.py deleted file mode 100644 index 9eeece8d..00000000 --- a/samples/get_by_ids_sample.py +++ /dev/null @@ -1,36 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - } - ]} - - response = client.get_by_id(data) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/get_sample.py b/samples/get_sample.py deleted file mode 100644 index 3a741864..00000000 --- a/samples/get_sample.py +++ /dev/null @@ -1,45 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType, GetOptions - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = GetOptions(False) - - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - }, - #To get records using unique column name and values. - { - "redaction" : "", - "table": "", - "columnName": "", - "columnValues": ["", ""], - } - ]} - - response = client.get(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/get_with_options.py b/samples/get_with_options.py deleted file mode 100644 index 8fda1136..00000000 --- a/samples/get_with_options.py +++ /dev/null @@ -1,34 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType, GetOptions - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - options = GetOptions(False) - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - } - ]} - - response = client.get(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_sample.py b/samples/insert_sample.py deleted file mode 100644 index 14756c92..00000000 --- a/samples/insert_sample.py +++ /dev/null @@ -1,38 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = InsertOptions(True) - - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_upsert_sample.py b/samples/insert_upsert_sample.py deleted file mode 100644 index aec06c49..00000000 --- a/samples/insert_upsert_sample.py +++ /dev/null @@ -1,39 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration, UpsertOption - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - upsertOption = UpsertOption(table='',column='') - options = InsertOptions(tokens=True,upsert=[upsertOption]) - - data = { - 'records': [ - { - 'table': '', - 'fields': { - '': '' - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_with_continue_on_error_sample.py b/samples/insert_with_continue_on_error_sample.py deleted file mode 100644 index 19df2b20..00000000 --- a/samples/insert_with_continue_on_error_sample.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = InsertOptions(tokens=True, continueOnError=True) - - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - }, - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/invoke_connection_sample.py b/samples/invoke_connection_sample.py deleted file mode 100644 index 7cf54319..00000000 --- a/samples/invoke_connection_sample.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RequestMethod, ConnectionConfig - -''' -This sample is for generating CVV using Skyflow Connection with a third party integration such as VISA -''' - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - connectionConfig = ConnectionConfig('', RequestMethod.POST, - requestHeader={ - 'Content-Type': 'application/json', - 'Authorization': '' - }, - requestBody= # For third party integration - { - "expirationDate": { - "mm": "12", - "yy": "22" - } - }, - pathParams={'cardID': ''}) # param as in the example - client = Client(config) - - response = client.invoke_connection(connectionConfig) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/query_sample.py b/samples/query_sample.py deleted file mode 100644 index 6b4d507c..00000000 --- a/samples/query_sample.py +++ /dev/null @@ -1,35 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' - -from skyflow import set_log_level, LogLevel -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - set_log_level(LogLevel.DEBUG) - - data = { - "query": "" - } - response = client.query(data) - print('Response:', response) -except SkyflowError as e: - if(e.data): - print('Error Occurred:', e.data) - else: - print('Error Occurred:', e.message) \ No newline at end of file diff --git a/samples/sa_token_sample.py b/samples/sa_token_sample.py deleted file mode 100644 index 9169cb4b..00000000 --- a/samples/sa_token_sample.py +++ /dev/null @@ -1,26 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearerToken = '' -tokenType = '' - - -def token_provider(): - global bearerToken - global tokenType - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token( - '') - return bearerToken, tokenType - - -try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) -except SkyflowError as e: - print(e) diff --git a/samples/service_account/scoped_token_generation_example.py b/samples/service_account/scoped_token_generation_example.py new file mode 100644 index 00000000..08c30a75 --- /dev/null +++ b/samples/service_account/scoped_token_generation_example.py @@ -0,0 +1,56 @@ +import json +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = '' +bearer_token = '' + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +options = {'role_ids': ['ROLE_ID1', 'ROLE_ID2']} + +def get_scoped_bearer_token_from_file_path(): + # Generate scoped bearer token from credentials file path. + global bearer_token + + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token(file_path, options) + bearer_token = token + return bearer_token + + except Exception as e: + print(f'Error generating token from file path: {str(e)}') + + + +def get_scoped_bearer_token_from_credentials_string(): + # Generate scoped bearer token from credentials string. + global bearer_token + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token_from_creds(credentials_string, options) + bearer_token = token + return bearer_token + except Exception as e: + print(f"Error generating token from credentials string: {str(e)}") + + +print(get_scoped_bearer_token_from_file_path()) + +print(get_scoped_bearer_token_from_credentials_string()) \ No newline at end of file diff --git a/samples/service_account/signed_token_generation_example.py b/samples/service_account/signed_token_generation_example.py new file mode 100644 index 00000000..1c97a1fb --- /dev/null +++ b/samples/service_account/signed_token_generation_example.py @@ -0,0 +1,59 @@ +import json +from skyflow.service_account import ( + is_expired, + generate_signed_data_tokens, + generate_signed_data_tokens_from_creds, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + + +options = { + 'ctx': 'CONTEX_ID', + 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90, # in seconds +} + +def get_signed_bearer_token_from_file_path(): + # Generate signed bearer token from credentials file path. + global bearer_token + + try: + if not is_expired(bearer_token): + return bearer_token + else: + data_token, signed_data_token = generate_signed_data_tokens(file_path, options) + return data_token, signed_data_token + + except Exception as e: + print(f'Error generating token from file path: {str(e)}') + + +def get_signed_bearer_token_from_credentials_string(): + # Generate signed bearer token from credentials string. 
+ global bearer_token + + try: + if not is_expired(bearer_token): + return bearer_token + else: + data_token, signed_data_token = generate_signed_data_tokens_from_creds(credentials_string, options) + return data_token, signed_data_token + + except Exception as e: + print(f'Error generating token from credentials string: {str(e)}') + + +print(get_signed_bearer_token_from_file_path()) + +print(get_signed_bearer_token_from_credentials_string()) diff --git a/samples/service_account/token_generation_example.py b/samples/service_account/token_generation_example.py new file mode 100644 index 00000000..34db4c37 --- /dev/null +++ b/samples/service_account/token_generation_example.py @@ -0,0 +1,55 @@ +import json +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + + +def get_bearer_token_from_file_path(): + # Generate bearer token from credentials file path. + global bearer_token + + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token(file_path) + bearer_token = token + return bearer_token + + except Exception as e: + print(f'Error generating token from file path: {str(e)}') + + +def get_bearer_token_from_credentials_string(): + # Generate bearer token from credentials string. + global bearer_token + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token_from_creds(credentials_string) + bearer_token = token + return bearer_token + except Exception as e: + print(f"Error generating token from credentials string: {str(e)}") + + + +print(get_bearer_token_from_file_path()) + +print(get_bearer_token_from_credentials_string()) \ No newline at end of file diff --git a/samples/service_account/token_generation_with_context_example.py b/samples/service_account/token_generation_with_context_example.py new file mode 100644 index 00000000..a43a072a --- /dev/null +++ b/samples/service_account/token_generation_with_context_example.py @@ -0,0 +1,55 @@ +import json +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +options = {'ctx': ''} + +def get_bearer_token_with_context_from_file_path(): + # Generate bearer token with context from credentials file path. + global bearer_token + + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token(file_path, options) + bearer_token = token + return bearer_token + + except Exception as e: + print(f'Error generating token from file path: {str(e)}') + + +def get_bearer_token_with_context_from_credentials_string(): + # Generate bearer token with context from credentials string. 
+ global bearer_token + try: + if not is_expired(bearer_token): + return bearer_token + else: + token, _ = generate_bearer_token_from_creds(credentials_string, options) + bearer_token = token + return bearer_token + except Exception as e: + print(f"Error generating token from credentials string: {str(e)}") + + +print(get_bearer_token_with_context_from_file_path()) + +print(get_bearer_token_with_context_from_credentials_string()) \ No newline at end of file diff --git a/samples/update_sample.py b/samples/update_sample.py deleted file mode 100644 index 9c6ea907..00000000 --- a/samples/update_sample.py +++ /dev/null @@ -1,39 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, UpdateOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = UpdateOptions(True) - - data = { - "records": [ - { - "id": "", - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.update(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/vault_api/client_operations.py b/samples/vault_api/client_operations.py new file mode 100644 index 00000000..80a8ca3a --- /dev/null +++ b/samples/vault_api/client_operations.py @@ -0,0 +1,92 @@ +from skyflow.error import SkyflowError +from skyflow import Skyflow, LogLevel +from skyflow import Env +from skyflow.vault.data import DeleteRequest + +""" +Skyflow Secure Data Deletion Example + +This example demonstrates how to: + 1. Configure Skyflow client credentials + 2. Set up vault configuration + 3. Create a delete request + 4. 
Handle response and errors +""" + +def perform_secure_data_deletion(): + try: + # Step 1: Configure Bearer Token Credentials + credentials = { + 'token': '', # Bearer token + } + + # Step 2: Configure vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials, # Authentication method + } + + # Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config( + primary_vault_config + ) + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Add Secondary Vault Configuration + + secondary_vault_config = { + 'vault_id': 'YOUR_VAULT_ID2', # Secondary vault + 'cluster_id': 'YOUR_CLUSTER_ID2', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment + # If credentials aren't specified, Skyflow credentials will be used + } + + # Add secondary vault config on the fly + skyflow_client.add_vault_config(secondary_vault_config) + + # Step 5: Update Vault Configuration + updated_vault_config = { + 'vault_id': 'YOUR_VAULT_ID2', # Vault ID and cluster ID are unique + 'cluster_id': 'YOUR_CLUSTER_ID2', # Cluster ID from your vault URL + 'credentials': credentials, # Update credentials + } + + # Update vault config on the fly + skyflow_client.update_vault_config(updated_vault_config) + + # Step 6: Prepare Delete Request + delete_ids = ['', ''] + + table_name = '' # Replace with actual table name + + delete_request = DeleteRequest( + table=table_name, + ids=delete_ids + ) + + # Step 7: Perform Secure Deletion on Secondary Vault + response = skyflow_client.vault('YOUR_VAULT_ID2').delete(delete_request) + + # Handle Successful Response + print('Delete successful: ', response) + + # Step 8: Remove Secondary Vault Configuration + skyflow_client.remove_vault_config(secondary_vault_config.get('vault_id')) # Remove vault configuration + + except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the secure data deletion function +perform_secure_data_deletion() \ No newline at end of file diff --git a/samples/vault_api/credentials_options.py b/samples/vault_api/credentials_options.py new file mode 100644 index 00000000..db792042 --- /dev/null +++ b/samples/vault_api/credentials_options.py @@ -0,0 +1,94 @@ +from skyflow.error import SkyflowError +from skyflow import Skyflow, LogLevel +from skyflow import Env +from skyflow.vault.data import DeleteRequest + +""" +Skyflow Secure Data Deletion Example + +This example demonstrates how to: + 1. Configure Skyflow client credentials + 2. Set up vault configuration + 3. Create and perform delete requests + 4. 
Handle response and errors +""" + +def perform_secure_data_deletion(): + try: + # Step 1: Configure Bearer Token Credentials + credentials = { + 'token': '', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: 'your_credentials_string', # Credentials as string + } + + # Step 2: Configure Vaults + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + } + + secondary_vault_config = { + 'vault_id': 'YOUR_SECONDARY_VAULT_ID', # Secondary vault + 'cluster_id': 'YOUR_SECONDARY_CLUSTER_ID', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment + 'credentials': credentials + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_vault_config(secondary_vault_config) + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Delete Request for Primary Vault + primary_delete_ids = ['', ''] + + primary_table_name = '' # Replace with actual table name + + primary_delete_request = DeleteRequest( + table=primary_table_name, + ids=primary_delete_ids + ) + + # Perform Delete Operation for Primary Vault + primary_delete_response = skyflow_client.vault('').delete(primary_delete_request) + + # Handle Successful Response + print('Primary Vault Deletion Successful:', primary_delete_response) + + # Step 5: Prepare Delete Request for Secondary Vault + secondary_delete_ids = ['', ''] + + secondary_table_name = '' # Replace with actual table name + + secondary_delete_request = DeleteRequest( + table=secondary_table_name, + ids=secondary_delete_ids + ) + + # Perform Delete Operation for Secondary Vault + secondary_delete_response = skyflow_client.vault('').delete(secondary_delete_request) + + # Handle Successful Response + print('Secondary Vault Deletion Successful:', secondary_delete_response) + + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the secure data deletion function +perform_secure_data_deletion() \ No newline at end of file diff --git a/samples/vault_api/delete_records.py b/samples/vault_api/delete_records.py new file mode 100644 index 00000000..bc497072 --- /dev/null +++ b/samples/vault_api/delete_records.py @@ -0,0 +1,80 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow import Env +from skyflow.vault.data import DeleteRequest + +""" +* Skyflow Delete Records Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create a delete request + * 4. 
Handle response and errors +""" + +def perform_delete(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Token credentials + } + + credentials = { + 'api_key': '' # API key for authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Delete Data + delete_ids = ['SKYFLOW_ID1', 'SKYFLOW_ID2', 'SKYFLOW_ID3'] # Record IDs to delete + table_name = '' + + # Create Delete Request + delete_request = DeleteRequest( + table=table_name, + ids=delete_ids + ) + + # Step 5: Perform Deletion + response = skyflow_client.vault(primary_vault_config.get('vault_id')).delete(delete_request) + + # Handle Successful Response + print('Deletion successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the deletion function +perform_delete() \ No newline at end of file diff --git a/samples/vault_api/detokenize_records.py b/samples/vault_api/detokenize_records.py new file mode 100644 index 00000000..b76aa89e --- /dev/null +++ b/samples/vault_api/detokenize_records.py @@ -0,0 +1,82 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +""" + * Skyflow Detokenization Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create a detokenization request + * 4. 
Handle response and errors +""" + +def perform_detokenization(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Token credentials + } + + credentials = { + 'token': '' # Bearer token for authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Detokenization Data + detokenize_data = ['token1', 'token2', 'token3'] # Tokens to be detokenized + redaction_type = RedactionType.REDACTED + + # Create Detokenize Request + detokenize_request = DetokenizeRequest( + tokens=detokenize_data, + redaction_type=redaction_type, + continue_on_error=True # Continue processing on errors + ) + + # Step 5: Perform Detokenization + response = skyflow_client.vault(primary_vault_config.get('vault_id')).detokenize(detokenize_request) + + # Handle Successful Response + print('Detokenization successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the detokenization function +perform_detokenization() \ No newline at end of file diff --git a/samples/vault_api/get_column_values.py b/samples/vault_api/get_column_values.py new file mode 100644 index 00000000..4b794c0d --- /dev/null +++ b/samples/vault_api/get_column_values.py @@ -0,0 +1,86 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import GetRequest + +""" + * Skyflow Secure Column-Based Retrieval Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create a column-based get request + * 4. 
Handle response and errors +""" + +def perform_secure_column_retrieval(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Token credentials + } + + credentials = { + 'path': '' # Path to credentials file + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Column-Based Retrieval Data + column_values = [ + '', # Example Unique Column value 1 + '' # Example Unique Column value 2 + ] + table_name = '' # Replace with your actual table name + column_name = '' # Column name configured as unique in the schema + + # Step 5: Create Get Column Request + get_request = GetRequest( + table=table_name, + column_name=column_name, + column_values=column_values, # Column values of the records to return + return_tokens=True # Optional: Get tokens for retrieved data + ) + + # Step 6: Perform Secure Retrieval + response = skyflow_client.vault(primary_vault_config.get('vault_id')).get(get_request) + + # Handle Successful Response + print('Column-based retrieval successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the secure column retrieval function +perform_secure_column_retrieval() diff --git a/samples/vault_api/get_records.py b/samples/vault_api/get_records.py new file mode 100644 index 00000000..b2fd445f --- /dev/null +++ b/samples/vault_api/get_records.py @@ -0,0 +1,71 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import GetRequest + +def perform_secure_data_retrieval(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Token credentials + } + + credentials = { + 'path': '' # Path to credentials file + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Retrieval Data + + get_ids = ['', 'SKYFLOW_ID2'] + + 
get_request = GetRequest( + table='', # Replace with your actual table name + ids=get_ids, + ) + + # Step 6: Configure Get Options + response = skyflow_client.vault(primary_vault_config.get('vault_id')).get(get_request) + + # Handle Successful Response + print('Data retrieval successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the secure data retrieval function + +perform_secure_data_retrieval() \ No newline at end of file diff --git a/samples/vault_api/insert_byot.py b/samples/vault_api/insert_byot.py new file mode 100644 index 00000000..ae4c1eae --- /dev/null +++ b/samples/vault_api/insert_byot.py @@ -0,0 +1,99 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.error import SkyflowError +from skyflow.utils.enums import TokenMode +from skyflow.vault.data import InsertRequest + +""" + * Skyflow Insert with BYOT Example + * + * This example demonstrates: + * 1. Configuring Skyflow client credentials + * 2. Setting up vault configuration + * 3. Utilizing Bring Your Own Token (BYOT) during insertion + * 4. Handling responses and errors +""" + +def perform_secure_data_insertion_with_byot(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Token credentials + } + + credentials = { + 'token': '' # Bearer token for authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Insertion Data + insert_data = [ + { + 'card_number': '', + 'cvv': '', + }, + ] + + table_name = '' + + # Step 5: BYOT Configuration + tokens = [ + { + 'card_number': '', + 'cvv': '', + }, + ] + + insert_request = InsertRequest( + table_name=table_name, + values=insert_data, + token_mode=TokenMode.ENABLE, # Enable Bring Your Own Token (BYOT) + tokens=tokens, # Specify tokens to use for BYOT + return_tokens=True, # Optionally get tokens for inserted data + continue_on_error=True # Optionally continue on partial errors + ) + + # Step 6: Perform Secure Insertion + response = skyflow_client.vault(primary_vault_config.get('vault_id')).insert(insert_request) + + # Handle Successful Response + print('Insertion Successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the secure data insertion function +perform_secure_data_insertion_with_byot() \ No newline at end of file diff --git a/samples/vault_api/insert_records.py 
b/samples/vault_api/insert_records.py new file mode 100644 index 00000000..32ec1fae --- /dev/null +++ b/samples/vault_api/insert_records.py @@ -0,0 +1,73 @@ +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import InsertRequest + +""" + * Skyflow Secure Data Insertion Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create an insert request + * 4. Handle response and errors +""" +def perform_secure_data_insertion(): + try: + # Step 1: Configure Credentials + credentials = { + 'api_key': '' # Using API Key authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Insertion Data + insert_data = [ + { + 'card_number': '', + 'cvv': '', + }, + ] + + table_name = '' # Replace with your actual table name + + # Step 5: Create Insert Request + insert_request = InsertRequest( + table_name=table_name, + values=insert_data, + return_tokens=True, # Optional: Get tokens for inserted data + continue_on_error=True # Optional: Continue on partial errors + ) + + # Step 6: Perform Secure Insertion + response = skyflow_client.vault(primary_vault_config.get('vault_id')).insert(insert_request) + + # Handle Successful Response + print('Insertion Successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the secure data insertion function +perform_secure_data_insertion() \ No newline at end of file diff --git a/samples/vault_api/invoke_connection.py b/samples/vault_api/invoke_connection.py new file mode 100644 index 00000000..54f36106 --- /dev/null +++ b/samples/vault_api/invoke_connection.py @@ -0,0 +1,85 @@ +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import RequestMethod +from skyflow.vault.connection import InvokeConnectionRequest + +""" + * Skyflow Connection Invocation Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault and connection configurations + * 3. Invoke a connection + * 4. 
Handle response and errors +""" + +def invoke_skyflow_connection(): + try: + # Step 1: Configure Credentials + credentials = { + 'api_key': '' # Using API Key authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure Connection + primary_connection_config = { + 'connection_id': '', # Unique connection identifier + 'connection_url': '', # Connection url + 'credentials': credentials # Connection-specific credentials + } + + # Step 4: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_connection_config(primary_connection_config) + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 5: Prepare Connection Request + request_body = { + '': '', # Replace with actual key-value pairs + '': '' + } + + request_headers = { + 'Content-Type': 'application/json' + } + + request_method = RequestMethod.POST + + # Step 6: Create Invoke Connection Request + invoke_connection_request = InvokeConnectionRequest( + method=request_method, + body=request_body, + headers=request_headers + ) + + # Step 7: Invoke Connection + response = skyflow_client.connection().invoke(invoke_connection_request) + + # Handle Successful Response + print('Connection invocation successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the connection function +invoke_skyflow_connection() \ No newline at end of file diff --git a/samples/vault_api/query_records.py b/samples/vault_api/query_records.py new file mode 100644 index 00000000..0af332fd --- /dev/null +++ b/samples/vault_api/query_records.py @@ -0,0 +1,76 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import QueryRequest + +""" + * Skyflow Query Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Execute a query on the vault + * 4. 
Handle response and errors +""" +def execute_query(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) + } + + credentials = { + 'api_key': '' # Using API Key authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Query + query = 'select * from table_name limit 1' # Example query + + query_request = QueryRequest( + query=query + ) + + # Step 5: Execute Query + response = skyflow_client.vault(primary_vault_config.get('vault_id')).query(query_request) + + # Handle Successful Response + print('Query Result: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the query function +execute_query() \ No newline at end of file diff --git a/samples/vault_api/tokenize_records.py b/samples/vault_api/tokenize_records.py new file mode 100644 index 00000000..b709a965 --- /dev/null +++ b/samples/vault_api/tokenize_records.py @@ -0,0 +1,80 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.tokens import TokenizeRequest + +""" + * Skyflow Tokenization Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Tokenize sensitive data + * 4. 
Handle response and errors +""" + +def execute_tokenization(): + try: + # Step 1: Configure Credentials + cred = { + 'clientID': '', # Client identifier + 'clientName': '', # Client name + 'tokenURI': '', # Token URI + 'keyID': '', # Key identifier + 'privateKey': '', # Private key for authentication + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) + } + + credentials = { + 'api_key': '' # Using API Key authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) # Used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Tokenization Data + tokenize_values = [ + {'value': '', 'column_group': ''}, + {'value': '', 'column_group': ''}, + ] + + tokenize_request = TokenizeRequest( + values=tokenize_values + ) + + # Step 5: Execute Tokenization + response = skyflow_client.vault(primary_vault_config.get('vault_id')).tokenize(tokenize_request) + + # Handle Successful Response + print('Tokenization successful:', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the tokenization function +execute_tokenization() \ No newline at end of file diff --git a/samples/vault_api/update_record.py b/samples/vault_api/update_record.py new file mode 100644 index 00000000..d7f0969f --- /dev/null +++ b/samples/vault_api/update_record.py @@ -0,0 +1,68 @@ +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import UpdateRequest + +""" + * Skyflow Secure Data Update Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create an update request + * 4. 
Handle response and errors +""" + +def perform_secure_data_update(): + try: + credentials = { + 'api_key': '' # Using API Key authentication + } + + # Step 2: Configure Vault + primary_vault_config = { + 'vault_id': '', # primary vault + 'cluster_id': '', # Cluster ID from your vault URL + 'env': Env.PROD, # Deployment environment (PROD by default) + 'credentials': credentials # Authentication method + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .set_log_level(LogLevel.ERROR) # Logging verbosity + .build() + ) + + # Step 4: Prepare Update Data + update_data = { + 'skyflow_id': '', # Skyflow ID of the record to update + 'card_number': '' # Updated sensitive data + } + + # Step 5: Create Update Request + update_request = UpdateRequest( + table='', + data=update_data, + return_tokens=True # Optional: Get tokens for updated data + ) + + # Step 7: Perform Secure Update + response = skyflow_client.vault(primary_vault_config.get('vault_id')).update(update_request) + + # Handle Successful Response + print('Update successful: ', response) + + except SkyflowError as error: + # Comprehensive Error Handling + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + +# Invoke the secure data update function +perform_secure_data_update() \ No newline at end of file diff --git a/setup.py b/setup.py index c7756728..650c3de6 100644 --- a/setup.py +++ b/setup.py @@ -4,9 +4,10 @@ from setuptools import setup, find_packages import sys -if sys.version_info < (3, 7): - raise RuntimeError("skyflow requires Python 3.7+") -current_version = '1.15.1' + +if sys.version_info < (3, 8): + raise RuntimeError("skyflow requires Python 3.8+") +current_version = '1.15.1.dev0+9eff324' setup( name='skyflow', @@ -19,12 +20,17 @@ description='Skyflow SDK for the Python programming language', long_description=open('README.rst').read(), install_requires=[ - 'PyJWT', - 'datetime', - 'requests', - 'aiohttp', - 'asyncio', - 'cryptography>=3.3.1' - ], - python_requires=">=3.7" + 'python_dateutil >= 2.5.3', + 'setuptools >= 21.0.0', + 'urllib3 >= 1.25.3, < 2.1.0', + 'pydantic >= 2', + 'typing-extensions >= 4.7.1', + 'DateTime~=5.5', + 'PyJWT~=2.9.0', + 'requests~=2.32.3', + 'coverage', + 'cryptography', + 'python-dotenv~=1.0.1' +], + python_requires=">=3.8" ) diff --git a/skyflow/__init__.py b/skyflow/__init__.py index abeed968..fc02764f 100644 --- a/skyflow/__init__.py +++ b/skyflow/__init__.py @@ -1,4 +1,2 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._utils import set_log_level, LogLevel \ No newline at end of file +from .utils import LogLevel, Env +from .client import Skyflow diff --git a/skyflow/_utils.py b/skyflow/_utils.py deleted file mode 100644 index 83bf54a6..00000000 --- a/skyflow/_utils.py +++ /dev/null @@ -1,166 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import urllib.parse -import logging -from enum import Enum -import platform -import sys -from skyflow.version import SDK_VERSION - -skyflowLog = logging.getLogger('skyflow') -skyflowLog.setLevel(logging.ERROR) - -supported_content_types = { - "JSON": 'application/json', - "PLAINTEXT": 'text/plain', - "XML": 'text/xml', - "URLENCODED": 'application/x-www-form-urlencoded', - "FORMDATA": 'multipart/form-data', -} - - -class LogLevel(Enum): - DEBUG = logging.DEBUG - INFO = logging.INFO - WARN = logging.WARN - ERROR = logging.ERROR - OFF = logging.CRITICAL - - -def set_log_level(logLevel: LogLevel): - ''' - Sets the Log Level for the Skyflow python SDK - ''' - skyflowLog.setLevel(logLevel.value) - - -def log_info(message: str, interface: str): - formattedMessage = '{} {}'.format(interface, message) - skyflowLog.info(formattedMessage) - - -# def log_debug(message: str, interface: str): -# formattedMessage = '{} {}'.format(interface, message) -# skyflowLog.debug(formattedMessage) - - -def log_error(message: str, interface: str): - formattedMessage = '{} {}'.format(interface, message) - skyflowLog.error(formattedMessage) - - -class InfoMessages(Enum): - INITIALIZE_CLIENT = "Initializing skyflow client" - CLIENT_INITIALIZED = "Initialized skyflow client successfully" - VALIDATE_INSERT_RECORDS = "Validating insert records" - VALIDATE_DETOKENIZE_INPUT = "Validating detokenize input" - VALIDATE_GET_BY_ID_INPUT = "Validating getByID input" - VALIDATE_CONNECTION_CONFIG = "Validating connection config" - INSERT_DATA_SUCCESS = "Data has been inserted successfully." - DETOKENIZE_SUCCESS = "Data has been detokenized successfully." - GET_BY_ID_SUCCESS = "Data fetched from ID successfully." - QUERY_SUCCESS = "Query executed successfully." - BEARER_TOKEN_RECEIVED = "tokenProvider returned token successfully." - INSERT_TRIGGERED = "Insert method triggered." - DETOKENIZE_TRIGGERED = "Detokenize method triggered." - GET_BY_ID_TRIGGERED = "Get by ID triggered." - INVOKE_CONNECTION_TRIGGERED = "Invoke connection triggered." - QUERY_TRIGGERED = "Query method triggered." - GENERATE_BEARER_TOKEN_TRIGGERED = "Generate bearer token triggered" - GENERATE_BEARER_TOKEN_SUCCESS = "Generate bearer token returned successfully" - IS_TOKEN_VALID_TRIGGERED = "isTokenValid() triggered" - IS_EXPIRED_TRIGGERED = "is_expired() triggered" - EMPTY_ACCESS_TOKEN = "Give access token is empty" - INVALID_TOKEN = "Given token is invalid" - UPDATE_TRIGGERED = "Update method triggered" - UPDATE_DATA_SUCCESS = "Data has been updated successfully" - GET_TRIGGERED = "Get triggered." - GET_SUCCESS = "Data fetched successfully." - DELETE_TRIGGERED = "Delete triggered." - DELETE_DATA_SUCCESS = "Data has been deleted successfully." 
- - -class InterfaceName(Enum): - CLIENT = "client" - INSERT = "client.insert" - DETOKENIZE = "client.detokenize" - GET_BY_ID = "client.get_by_id" - GET = "client.get" - UPDATE = "client.update" - INVOKE_CONNECTION = "client.invoke_connection" - QUERY = "client.query" - GENERATE_BEARER_TOKEN = "service_account.generate_bearer_token" - - IS_TOKEN_VALID = "service_account.isTokenValid" - IS_EXPIRED = "service_account.is_expired" - DELETE = "client.delete" - - -def http_build_query(data): - ''' - Creates a form urlencoded string from python dictionary - urllib.urlencode() doesn't encode it in a php-esque way, this function helps in that - ''' - - return urllib.parse.urlencode(r_urlencode(list(), dict(), data)) - - -def r_urlencode(parents, pairs, data): - ''' - convert the python dict recursively into a php style associative dictionary - ''' - if isinstance(data, list) or isinstance(data, tuple): - for i in range(len(data)): - parents.append(i) - r_urlencode(parents, pairs, data[i]) - parents.pop() - elif isinstance(data, dict): - for key, value in data.items(): - parents.append(key) - r_urlencode(parents, pairs, value) - parents.pop() - else: - pairs[render_key(parents)] = str(data) - - return pairs - - -def render_key(parents): - ''' - renders the nested dictionary key as an associative array (php style dict) - ''' - depth, outStr = 0, '' - for x in parents: - s = "[%s]" if depth > 0 or isinstance(x, int) else "%s" - outStr += s % str(x) - depth += 1 - return outStr - -def getMetrics(): - ''' fetch metrics - ''' - sdk_name_version = "skyflow-python@" + SDK_VERSION - - try: - sdk_client_device_model = platform.node() - except Exception: - sdk_client_device_model = "" - - try: - sdk_client_os_details = sys.platform - except Exception: - sdk_client_os_details = "" - - try: - sdk_runtime_details = sys.version - except Exception: - sdk_runtime_details = "" - - details_dic = { - 'sdk_name_version': sdk_name_version, - 'sdk_client_device_model': sdk_client_device_model, - 'sdk_client_os_details': sdk_client_os_details, - 'sdk_runtime_details': "Python " + sdk_runtime_details, - } - return details_dic \ No newline at end of file diff --git a/skyflow/client/__init__.py b/skyflow/client/__init__.py new file mode 100644 index 00000000..246ca2f6 --- /dev/null +++ b/skyflow/client/__init__.py @@ -0,0 +1 @@ +from .skyflow import Skyflow diff --git a/skyflow/client/skyflow.py b/skyflow/client/skyflow.py new file mode 100644 index 00000000..be3f7d9a --- /dev/null +++ b/skyflow/client/skyflow.py @@ -0,0 +1,237 @@ +from collections import OrderedDict +from skyflow import LogLevel +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.logger import log_info, Logger, log_error +from skyflow.utils.validations import validate_vault_config, validate_connection_config, validate_update_vault_config, \ + validate_update_connection_config, validate_credentials, validate_log_level +from skyflow.vault.client.client import VaultClient +from skyflow.vault.controller import Vault +from skyflow.vault.controller import Connection + +class Skyflow: + def __init__(self, builder): + self.__builder = builder + log_info(SkyflowMessages.Info.CLIENT_INITIALIZED.value, self.__builder.get_logger()) + + @staticmethod + def builder(): + return Skyflow.Builder() + + def add_vault_config(self, config): + self.__builder._Builder__add_vault_config(config) + return self + + def remove_vault_config(self, vault_id): + self.__builder.remove_vault_config(vault_id) + + def 
update_vault_config(self,config): + self.__builder.update_vault_config(config) + + def get_vault_config(self, vault_id): + return self.__builder.get_vault_config(vault_id).get("vault_client").get_config() + + def add_connection_config(self, config): + self.__builder._Builder__add_connection_config(config) + return self + + def remove_connection_config(self, connection_id): + self.__builder.remove_connection_config(connection_id) + return self + + def update_connection_config(self, config): + self.__builder.update_connection_config(config) + return self + + def get_connection_config(self, connection_id): + return self.__builder.get_connection_config(connection_id).get("vault_client").get_config() + + def add_skyflow_credentials(self, credentials): + self.__builder._Builder__add_skyflow_credentials(credentials) + return self + + def update_skyflow_credentials(self, credentials): + self.__builder._Builder__add_skyflow_credentials(credentials) + + def set_log_level(self, log_level): + self.__builder._Builder__set_log_level(log_level) + return self + + def get_log_level(self): + return self.__builder._Builder__log_level + + def update_log_level(self, log_level): + self.__builder._Builder__set_log_level(log_level) + + def vault(self, vault_id = None) -> Vault: + vault_config = self.__builder.get_vault_config(vault_id) + return vault_config.get("controller") + + def connection(self, connection_id = None) -> Connection: + connection_config = self.__builder.get_connection_config(connection_id) + return connection_config.get("controller") + + class Builder: + def __init__(self): + self.__vault_configs = OrderedDict() + self.__vault_list = list() + self.__connection_configs = OrderedDict() + self.__connection_list = list() + self.__skyflow_credentials = None + self.__log_level = LogLevel.ERROR + self.__logger = Logger(LogLevel.ERROR) + + def add_vault_config(self, config): + vault_id = config.get("vault_id") + if not isinstance(vault_id, str) or not vault_id: + raise SkyflowError( + SkyflowMessages.Error.INVALID_VAULT_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + if vault_id in [vault.get("vault_id") for vault in self.__vault_list]: + log_info(SkyflowMessages.Info.VAULT_CONFIG_EXISTS.value.format(vault_id), self.__logger) + raise SkyflowError( + SkyflowMessages.Error.VAULT_ID_ALREADY_EXISTS.value.format(vault_id), + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + + self.__vault_list.append(config) + return self + + def remove_vault_config(self, vault_id): + if vault_id in self.__vault_configs.keys(): + self.__vault_configs.pop(vault_id) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_VAULT_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + def update_vault_config(self, config): + validate_update_vault_config(self.__logger, config) + vault_id = config.get("vault_id") + vault_config = self.__vault_configs[vault_id] + vault_config.get("vault_client").update_config(config) + + def get_vault_config(self, vault_id): + if vault_id is None: + if self.__vault_configs: + return next(iter(self.__vault_configs.values())) + raise SkyflowError(SkyflowMessages.Error.EMPTY_VAULT_CONFIGS.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + if vault_id in self.__vault_configs: + return self.__vault_configs.get(vault_id) + log_info(SkyflowMessages.Info.VAULT_CONFIG_DOES_NOT_EXIST.value.format(vault_id), self.__logger) + raise SkyflowError(SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format(vault_id), SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + + 
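# Connection configs follow the same pattern as vault configs: add_connection_config only
+ # de-duplicates and queues the config here, while validation and creation of the
+ # VaultClient / Connection controller happen later, in build(), via __add_connection_config.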
def add_connection_config(self, config): + connection_id = config.get("connection_id") + if not isinstance(connection_id, str) or not connection_id: + raise SkyflowError( + SkyflowMessages.Error.INVALID_CONNECTION_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + if connection_id in [connection.get("connection_id") for connection in self.__connection_list]: + log_info(SkyflowMessages.Info.CONNECTION_CONFIG_EXISTS.value.format(connection_id), self.__logger) + raise SkyflowError( + SkyflowMessages.Error.CONNECTION_ID_ALREADY_EXISTS.value.format(connection_id), + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + self.__connection_list.append(config) + return self + + def remove_connection_config(self, connection_id): + if connection_id in self.__connection_configs.keys(): + self.__connection_configs.pop(connection_id) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_CONNECTION_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + def update_connection_config(self, config): + validate_update_connection_config(self.__logger, config) + connection_id = config['connection_id'] + connection_config = self.__connection_configs[connection_id] + connection_config.get("vault_client").update_config(config) + + def get_connection_config(self, connection_id): + if connection_id is None: + if self.__connection_configs: + return next(iter(self.__connection_configs.values())) + + raise SkyflowError(SkyflowMessages.Error.EMPTY_CONNECTION_CONFIGS.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + if connection_id in self.__connection_configs: + return self.__connection_configs.get(connection_id) + log_info(SkyflowMessages.Info.CONNECTION_CONFIG_DOES_NOT_EXIST.value.format(connection_id), self.__logger) + raise SkyflowError(SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format(connection_id), SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + + def add_skyflow_credentials(self, credentials): + self.__skyflow_credentials = credentials + return self + + def set_log_level(self, log_level): + self.__log_level = log_level + return self + + def get_logger(self): + return self.__logger + + def __add_vault_config(self, config): + validate_vault_config(self.__logger, config) + vault_id = config.get("vault_id") + vault_client = VaultClient(config) + self.__vault_configs[vault_id] = { + "vault_client": vault_client, + "controller": Vault(vault_client) + } + log_info(SkyflowMessages.Info.VAULT_CONTROLLER_INITIALIZED.value.format(config.get("vault_id")), self.__logger) + + def __add_connection_config(self, config): + validate_connection_config(self.__logger, config) + connection_id = config.get("connection_id") + vault_client = VaultClient(config) + self.__connection_configs[connection_id] = { + "vault_client": vault_client, + "controller": Connection(vault_client) + } + log_info(SkyflowMessages.Info.CONNECTION_CONTROLLER_INITIALIZED.value.format(config.get("connection_id")), self.__logger) + + def __update_vault_client_logger(self, log_level, logger): + for vault_id, vault_config in self.__vault_configs.items(): + vault_config.get("vault_client").set_logger(log_level,logger) + + for connection_id, connection_config in self.__connection_configs.items(): + connection_config.get("vault_client").set_logger(log_level,logger) + + def __set_log_level(self, log_level): + validate_log_level(self.__logger, log_level) + self.__log_level = log_level + self.__logger.set_log_level(log_level) + self.__update_vault_client_logger(log_level, self.__logger) + 
log_info(SkyflowMessages.Info.LOGGER_SETUP_DONE.value, self.__logger) + log_info(SkyflowMessages.Info.CURRENT_LOG_LEVEL.value.format(self.__log_level), self.__logger) + + def __add_skyflow_credentials(self, credentials): + if credentials is not None: + self.__skyflow_credentials = credentials + validate_credentials(self.__logger, credentials) + for vault_id, vault_config in self.__vault_configs.items(): + vault_config.get("vault_client").set_common_skyflow_credentials(credentials) + + for connection_id, connection_config in self.__connection_configs.items(): + connection_config.get("vault_client").set_common_skyflow_credentials(self.__skyflow_credentials) + def build(self): + validate_log_level(self.__logger, self.__log_level) + self.__logger.set_log_level(self.__log_level) + + for config in self.__vault_list: + self.__add_vault_config(config) + + for config in self.__connection_list: + self.__add_connection_config(config) + + self.__update_vault_client_logger(self.__log_level, self.__logger) + + self.__add_skyflow_credentials(self.__skyflow_credentials) + + return Skyflow(self) diff --git a/skyflow/error/__init__.py b/skyflow/error/__init__.py new file mode 100644 index 00000000..305c7966 --- /dev/null +++ b/skyflow/error/__init__.py @@ -0,0 +1 @@ +from ._skyflow_error import SkyflowError \ No newline at end of file diff --git a/skyflow/error/_skyflow_error.py b/skyflow/error/_skyflow_error.py new file mode 100644 index 00000000..e23c0133 --- /dev/null +++ b/skyflow/error/_skyflow_error.py @@ -0,0 +1,19 @@ +from skyflow.utils import SkyflowMessages +from skyflow.utils.logger import log_error + +class SkyflowError(Exception): + def __init__(self, + message, + http_code, + request_id = None, + grpc_code = None, + http_status = None, + details = None): + self.message = message + self.http_code = http_code + self.grpc_code = grpc_code + self.http_status = http_status if http_status else SkyflowMessages.HttpStatus.BAD_REQUEST.value + self.details = details + self.request_id = request_id + log_error(message, http_code, request_id, grpc_code, http_status, details) + super().__init__() \ No newline at end of file diff --git a/skyflow/errors/__init__.py b/skyflow/errors/__init__.py deleted file mode 100644 index 70f2f68f..00000000 --- a/skyflow/errors/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._skyflow_errors import SkyflowErrorCodes -from ._skyflow_errors import SkyflowError \ No newline at end of file diff --git a/skyflow/errors/_skyflow_errors.py b/skyflow/errors/_skyflow_errors.py deleted file mode 100644 index 2e792812..00000000 --- a/skyflow/errors/_skyflow_errors.py +++ /dev/null @@ -1,120 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from enum import Enum -from skyflow._utils import log_error - - -class SkyflowErrorCodes(Enum): - INVALID_INPUT = 400 - INVALID_INDEX = 404 - SERVER_ERROR = 500 - PARTIAL_SUCCESS = 500 - TOKENS_GET_COLUMN_NOT_SUPPORTED = 400 - REDACTION_WITH_TOKENS_NOT_SUPPORTED = 400 - - -class SkyflowErrorMessages(Enum): - API_ERROR = "Server returned status code %s" - - FILE_NOT_FOUND = "File at %s not found" - FILE_INVALID_JSON = "File at %s is not in JSON format" - INVALID_CREDENTIALS = "Given credentials are not valid" - INVALID_URL = "Given url '%s' is invalid" - - MISSING_PRIVATE_KEY = "Unable to read Private key" - MISSING_CLIENT_ID = "Unable to read Client ID" - MISSING_KEY_ID = "Unable to read Key ID" - MISSING_TOKEN_URI = "Unable to read Token URI" - - JWT_INVALID_FORMAT = "Private key is not in correct format" - MISSING_ACCESS_TOKEN = "accessToken not present in response" - MISSING_TOKEN_TYPE = "tokenType not present in response" - JWT_DECODE_ERROR = "Invalid access token" - - # vault - RECORDS_KEY_ERROR = "Records key is missing from payload" - FIELDS_KEY_ERROR = "Fields key is missing from payload" - TABLE_KEY_ERROR = "Table key is missing from payload" - TOKEN_KEY_ERROR = "Token key is missing from payload" - IDS_KEY_ERROR = "Id(s) key is missing from payload" - REDACTION_KEY_ERROR = "Redaction key is missing from payload" - UNIQUE_COLUMN_OR_IDS_KEY_ERROR = "Ids or Unique column key is missing from payload" - UPDATE_FIELD_KEY_ERROR = "Atleast one field should be provided to update" - - INVALID_JSON = "Given %s is invalid JSON" - INVALID_RECORDS_TYPE = "Records key has value of type %s, expected list" - INVALID_FIELDS_TYPE = "Fields key has value of type %s, expected dict" - INVALID_TOKENS_TYPE = "Tokens key has value of type %s, expected dict" - EMPTY_TOKENS_IN_INSERT = "Tokens is empty in records" - MISMATCH_OF_FIELDS_AND_TOKENS = "Fields and Tokens object are not matching" - INVALID_TABLE_TYPE = "Table key has value of type %s, expected string" - INVALID_TABLE_TYPE_DELETE = "Table of type string is required at index %s in records array" - INVALID_IDS_TYPE = "Ids key has value of type %s, expected list" - INVALID_ID_TYPE = "Id key has value of type %s, expected string" - INVALID_ID_TYPE_DELETE = "Id of type string is required at index %s in records array" - INVALID_REDACTION_TYPE = "Redaction key has value of type %s, expected Skyflow.Redaction" - INVALID_COLUMN_NAME = "Column name has value of type %s, expected string" - INVALID_COLUMN_VALUE = "Column values has value of type %s, expected list" - EMPTY_RECORDS_IN_DELETE = "records array cannot be empty" - EMPTY_ID_IN_DELETE = "Id cannot be empty in records array" - EMPTY_TABLE_IN_DELETE = "Table cannot be empty in records array" - RECORDS_KEY_NOT_FOUND_DELETE = "records object is required" - - INVALID_REQUEST_BODY = "Given request body is not valid" - INVALID_RESPONSE_BODY = "Given response body is not valid" - INVALID_HEADERS = "Given Request Headers is not valid" - INVALID_PATH_PARAMS = "Given path params are not valid" - INVALID_QUERY_PARAMS = "Given query params are not valid" - INVALID_PATH_PARAM_TYPE = "Path params (key, value) must be of type 'str' given type - (%s, %s)" - INVALID_QUERY_PARAM_TYPE = "Query params (key, value) must be of type 'str' given type - (%s, %s)" - - INVALID_TOKEN_TYPE = "Token key has value of type %s, expected string" - REDACTION_WITH_TOKENS_NOT_SUPPORTED = "Redaction cannot be used when tokens are true in options" - TOKENS_GET_COLUMN_NOT_SUPPORTED = "Column_name or column_values cannot be used with 
tokens in options" - BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED = "Both skyflow ids and column details (name and/or values) are specified in payload" - - PARTIAL_SUCCESS = "Server returned errors, check SkyflowError.data for more" - - VAULT_ID_INVALID_TYPE = "Expected Vault ID to be str, got %s" - VAULT_URL_INVALID_TYPE = "Expected Vault URL to be str, got %s" - TOKEN_PROVIDER_ERROR = "Expected Token Provider to be function, got %s" - - EMPTY_VAULT_ID = "Vault ID must not be empty" - EMPTY_VAULT_URL = "Vault URL must not be empty" - RESPONSE_NOT_JSON = "Response %s is not valid JSON" - - TOKEN_PROVIDER_INVALID_TOKEN = "Invalid token from tokenProvider" - INVALID_UPSERT_OPTIONS_TYPE = "upsertOptions key has value of type %s, expected list" - EMPTY_UPSERT_OPTIONS_LIST = "upsert option cannot be an empty array, atleast one object of table and column is required" - INVALID_UPSERT_TABLE_TYPE = "upsert object table key has value of type %s, expected string" - INVALID_UPSERT_COLUMN_TYPE = "upsert object column key has value of type %s, expected string" - EMPTY_UPSERT_OPTION_TABLE = "upsert object table value is empty string at index %s, expected non-empty string" - EMPTY_UPSERT_OPTION_COLUMN = "upsert object column value is empty string at index %s, expected non-empty string" - QUERY_KEY_ERROR = "Query key is missing from payload" - INVALID_QUERY_TYPE = "Query key has value of type %s, expected string" - EMPTY_QUERY = "Query key cannot be empty" - INVALID_QUERY_COMMAND = "only SELECT commands are supported, %s command was passed instead" - SERVER_ERROR = "Server returned errors, check SkyflowError.data for more" - - BATCH_INSERT_PARTIAL_SUCCESS = "Insert Operation is partially successful" - BATCH_INSERT_FAILURE = "Insert Operation is unsuccessful" - - INVALID_BYOT_TYPE = "byot option has value of type %s, expected Skyflow.BYOT" - NO_TOKENS_IN_INSERT = "Tokens are not passed in records for byot as %s" - TOKENS_PASSED_FOR_BYOT_DISABLE = "Pass byot parameter with ENABLE for token insertion" - INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT = "For byot as ENABLE_STRICT, tokens should be passed for all fields" - -class SkyflowError(Exception): - def __init__(self, code, message="An Error occured", data={}, interface: str = 'Unknown') -> None: - if type(code) is SkyflowErrorCodes: - self.code = code.value - else: - self.code = code - if type(message) is SkyflowErrorMessages: - self.message = message.value - else: - self.message = message - log_error(self.message, interface) - self.data = data - super().__init__(self.message) diff --git a/tests/service_account/data/empty.json b/skyflow/generated/__init__.py similarity index 100% rename from tests/service_account/data/empty.json rename to skyflow/generated/__init__.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py new file mode 100644 index 00000000..1544b853 --- /dev/null +++ b/skyflow/generated/rest/__init__.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +# flake8: noqa + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
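A minimal sketch of the header requirement described above, assuming a direct call to the Data API with the requests library (the vault identifier, request path, and bearer token below are placeholders):

import requests

# Placeholders: substitute your vault's URL identifier, a real Data API path, and a valid bearer token.
base_url = "https://<identifier>.vault.skyflowapis.com"
headers = {"Authorization": "Bearer <bearer_token>"}

response = requests.get(base_url + "/<data-api-path>", headers=headers)
print(response.status_code)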
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +__version__ = "1.0.0" + +# import apis into sdk package +from skyflow.generated.rest.api.audit_api import AuditApi +from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi +from skyflow.generated.rest.api.query_api import QueryApi +from skyflow.generated.rest.api.records_api import RecordsApi +from skyflow.generated.rest.api.tokens_api import TokensApi + +# import ApiClient +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.api_client import ApiClient +from skyflow.generated.rest.configuration import Configuration +from skyflow.generated.rest.exceptions import OpenApiException +from skyflow.generated.rest.exceptions import ApiTypeError +from skyflow.generated.rest.exceptions import ApiValueError +from skyflow.generated.rest.exceptions import ApiKeyError +from skyflow.generated.rest.exceptions import ApiAttributeError +from skyflow.generated.rest.exceptions import ApiException + +# import models into sdk package +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus +from skyflow.generated.rest.models.protobuf_any import ProtobufAny +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from skyflow.generated.rest.models.request_action_type import RequestActionType +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_batch_record import 
V1BatchRecord +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_card import V1Card +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig diff --git a/skyflow/generated/rest/api/__init__.py b/skyflow/generated/rest/api/__init__.py new file mode 100644 index 00000000..01b15fdb --- /dev/null +++ b/skyflow/generated/rest/api/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +# import apis into api package +from skyflow.generated.rest.api.audit_api import AuditApi +from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi +from skyflow.generated.rest.api.query_api import QueryApi +from skyflow.generated.rest.api.records_api import RecordsApi +from skyflow.generated.rest.api.tokens_api import TokensApi +from skyflow.generated.rest.api.authentication_api import AuthenticationApi diff --git a/skyflow/generated/rest/api/audit_api.py b/skyflow/generated/rest/api/audit_api.py new file mode 100644 index 00000000..dc6de1fe --- /dev/null +++ b/skyflow/generated/rest/api/audit_api.py @@ -0,0 +1,848 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictInt, StrictStr, field_validator +from typing import Optional +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class AuditApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def audit_service_list_audit_events( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. 
Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1AuditResponse: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. 
+ :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. + :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. + :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. 
+ :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. + :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return 
self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def audit_service_list_audit_events_with_http_info( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. 
For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1AuditResponse]: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. + :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. 
+ :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. + :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. 
+ :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def audit_service_list_audit_events_without_preload_content( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: 
Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. 
For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. + :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. 
+ :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. + :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. 
+ :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _audit_service_list_audit_events_serialize( + self, + filter_ops_account_id, + filter_ops_context_change_id, + filter_ops_context_request_id, + filter_ops_context_trace_id, + filter_ops_context_session_id, + filter_ops_context_actor, + filter_ops_context_actor_type, + filter_ops_context_access_type, + filter_ops_context_ip_address, + filter_ops_context_origin, + filter_ops_context_auth_mode, + filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id, + 
filter_ops_parent_account_id, + filter_ops_workspace_id, + filter_ops_vault_id, + filter_ops_resource_ids, + filter_ops_action_type, + filter_ops_resource_type, + filter_ops_tags, + filter_ops_response_code, + filter_ops_start_time, + filter_ops_end_time, + filter_ops_api_name, + filter_ops_response_message, + filter_ops_http_method, + filter_ops_http_uri, + sort_ops_sort_by, + sort_ops_order_by, + after_ops_timestamp, + after_ops_change_id, + limit, + offset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filter_ops_context_change_id is not None: + + _query_params.append(('filterOps.context.changeID', filter_ops_context_change_id)) + + if filter_ops_context_request_id is not None: + + _query_params.append(('filterOps.context.requestID', filter_ops_context_request_id)) + + if filter_ops_context_trace_id is not None: + + _query_params.append(('filterOps.context.traceID', filter_ops_context_trace_id)) + + if filter_ops_context_session_id is not None: + + _query_params.append(('filterOps.context.sessionID', filter_ops_context_session_id)) + + if filter_ops_context_actor is not None: + + _query_params.append(('filterOps.context.actor', filter_ops_context_actor)) + + if filter_ops_context_actor_type is not None: + + _query_params.append(('filterOps.context.actorType', filter_ops_context_actor_type)) + + if filter_ops_context_access_type is not None: + + _query_params.append(('filterOps.context.accessType', filter_ops_context_access_type)) + + if filter_ops_context_ip_address is not None: + + _query_params.append(('filterOps.context.ipAddress', filter_ops_context_ip_address)) + + if filter_ops_context_origin is not None: + + _query_params.append(('filterOps.context.origin', filter_ops_context_origin)) + + if filter_ops_context_auth_mode is not None: + + _query_params.append(('filterOps.context.authMode', filter_ops_context_auth_mode)) + + if filter_ops_context_jwt_id is not None: + + _query_params.append(('filterOps.context.jwtID', filter_ops_context_jwt_id)) + + if filter_ops_context_bearer_token_context_id is not None: + + _query_params.append(('filterOps.context.bearerTokenContextID', filter_ops_context_bearer_token_context_id)) + + if filter_ops_parent_account_id is not None: + + _query_params.append(('filterOps.parentAccountID', filter_ops_parent_account_id)) + + if filter_ops_account_id is not None: + + _query_params.append(('filterOps.accountID', filter_ops_account_id)) + + if filter_ops_workspace_id is not None: + + _query_params.append(('filterOps.workspaceID', filter_ops_workspace_id)) + + if filter_ops_vault_id is not None: + + _query_params.append(('filterOps.vaultID', filter_ops_vault_id)) + + if filter_ops_resource_ids is not None: + + _query_params.append(('filterOps.resourceIDs', filter_ops_resource_ids)) + + if filter_ops_action_type is not None: + + _query_params.append(('filterOps.actionType', filter_ops_action_type)) + + if filter_ops_resource_type is not None: + + _query_params.append(('filterOps.resourceType', filter_ops_resource_type)) + + if filter_ops_tags is not None: + + _query_params.append(('filterOps.tags', filter_ops_tags)) + + 
if filter_ops_response_code is not None: + + _query_params.append(('filterOps.responseCode', filter_ops_response_code)) + + if filter_ops_start_time is not None: + + _query_params.append(('filterOps.startTime', filter_ops_start_time)) + + if filter_ops_end_time is not None: + + _query_params.append(('filterOps.endTime', filter_ops_end_time)) + + if filter_ops_api_name is not None: + + _query_params.append(('filterOps.apiName', filter_ops_api_name)) + + if filter_ops_response_message is not None: + + _query_params.append(('filterOps.responseMessage', filter_ops_response_message)) + + if filter_ops_http_method is not None: + + _query_params.append(('filterOps.httpMethod', filter_ops_http_method)) + + if filter_ops_http_uri is not None: + + _query_params.append(('filterOps.httpURI', filter_ops_http_uri)) + + if sort_ops_sort_by is not None: + + _query_params.append(('sortOps.sortBy', sort_ops_sort_by)) + + if sort_ops_order_by is not None: + + _query_params.append(('sortOps.orderBy', sort_ops_order_by)) + + if after_ops_timestamp is not None: + + _query_params.append(('afterOps.timestamp', after_ops_timestamp)) + + if after_ops_change_id is not None: + + _query_params.append(('afterOps.changeID', after_ops_change_id)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if offset is not None: + + _query_params.append(('offset', offset)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/audit/events', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/authentication_api.py b/skyflow/generated/rest/api/authentication_api.py new file mode 100644 index 00000000..8abbbf67 --- /dev/null +++ b/skyflow/generated/rest/api/authentication_api.py @@ -0,0 +1,319 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
  • X-SKYFLOW-ACCOUNT-ID: Your Skyflow account ID. Example: X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
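As a rough illustration of the two headers described above, the sketch below wires them into the generated client added elsewhere in this patch; it assumes the generated Configuration and ApiClient expose the standard OpenAPI Generator Python surface (host, access_token, set_default_header), and the token and account ID values are placeholders.

# Hedged sketch: supplying the two required Management API headers via the
# generated client. Host, bearer token, and account ID are placeholders.
from skyflow.generated.rest.api_client import ApiClient
from skyflow.generated.rest.configuration import Configuration

config = Configuration(host="https://manage.skyflowapis.com")
config.access_token = "<bearer_token>"  # sent as "Authorization: Bearer ..."

api_client = ApiClient(config)
# The account ID header is not part of the auth settings, so add it as a default header.
api_client.set_default_header("X-SKYFLOW-ACCOUNT-ID", "<your_account_id>")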
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest +from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class AuthenticationApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def authentication_service_get_auth_token( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetAuthTokenResponse: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def authentication_service_get_auth_token_with_http_info( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetAuthTokenResponse]: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def authentication_service_get_auth_token_without_preload_content( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _authentication_service_get_auth_token_serialize( + self, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/auth/sa/oauth/token', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/bin_lookup_api.py b/skyflow/generated/rest/api/bin_lookup_api.py new file mode 100644 index 00000000..1bb3e64b --- /dev/null +++ b/skyflow/generated/rest/api/bin_lookup_api.py @@ -0,0 +1,315 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. 
The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
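A minimal sketch of pointing the generated client at a vault's Data API base URI, under the same assumption that Configuration and ApiClient follow the standard OpenAPI Generator Python surface; the identifier and bearer token are placeholders.

# Hedged sketch: the Data API host embeds the vault identifier from the vault URL.
# Identifier and bearer token below are placeholders.
from skyflow.generated.rest.api_client import ApiClient
from skyflow.generated.rest.configuration import Configuration

config = Configuration(host="https://<identifier>.vault.skyflowapis.com")
config.access_token = "<bearer_token>"  # sent as "Authorization: Bearer ..."
data_api_client = ApiClient(config)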
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class BINLookupApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def b_in_list_service_list_cards_of_bin( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BINListResponse: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def b_in_list_service_list_cards_of_bin_with_http_info( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BINListResponse]: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def b_in_list_service_list_cards_of_bin_without_preload_content( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _b_in_list_service_list_cards_of_bin_serialize( + self, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/card_lookup', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/query_api.py b/skyflow/generated/rest/api/query_api.py new file mode 100644 index 00000000..edf04f27 --- /dev/null +++ b/skyflow/generated/rest/api/query_api.py @@ -0,0 +1,330 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. 
The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization
Value: A Bearer Token. See API Authentication.
Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
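For illustration, a minimal sketch (not part of this SDK) of a direct Data API call carrying that header; the vault identifier, endpoint path, and token below are placeholders:

    import requests  # third-party HTTP client, used only for this illustration

    # Placeholder values -- substitute your own vault identifier, endpoint, and token.
    base_url = "https://<identifier>.vault.skyflowapis.com"   # production base URI
    token = "<bearer-token>"                                   # see API Authentication

    response = requests.post(
        base_url + "/v1/<endpoint>",                           # endpoint path is a placeholder
        headers={"Authorization": "Bearer " + token},          # required on every Data API call
        json={},                                               # endpoint-specific request body
    )
    print(response.status_code)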
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class QueryApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def query_service_execute_query( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetQueryResponse: + """Execute Query + + Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support the `?` conditional for columns with column-level encryption disabled.
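A minimal sketch of issuing such a query through this generated client, within the constraints above; the vault ID and table name are placeholders, the default ApiClient is assumed to already be configured with a bearer token, and the body model is assumed to expose a `query` field:

    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.query_api import QueryApi
    from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody

    api_client = ApiClient.get_default()          # assumed to already carry Bearer credentials
    query_api = QueryApi(api_client)

    # SELECT only; at most 25 records per call, so page through results with OFFSET.
    body = QueryServiceExecuteQueryBody(query="SELECT * FROM credit_cards LIMIT 25 OFFSET 25")  # 'query' field name is an assumption
    response = query_api.query_service_execute_query(vault_id="<vault-id>", body=body)
    print(response)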
    • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def query_service_execute_query_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetQueryResponse]: + """Execute Query + + Returns records for a valid SQL query. This endpoint
      • Can return redacted record values.
      • Supports only the SELECT command.
      • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
      • Can't modify the vault or perform transactions.
      • Can't return tokens.
      • Can't return file download or render URLs.
      • Doesn't support the WHERE keyword with columns using transient tokenization.
      • Doesn't support the `?` conditional for columns with column-level encryption disabled.
        • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def query_service_execute_query_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Execute Query + + Returns records for a valid SQL query. This endpoint
          • Can return redacted record values.
          • Supports only the SELECT command.
          • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
          • Can't modify the vault or perform transactions.
          • Can't return tokens.
          • Can't return file download or render URLs.
          • Doesn't support the WHERE keyword with columns using transient tokenization.
          • Doesn't support the `?` conditional for columns with column-level encryption disabled.
            • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _query_service_execute_query_serialize( + self, + vault_id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/query', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/records_api.py b/skyflow/generated/rest/api/records_api.py new file mode 100644 index 00000000..ae9a2c29 --- /dev/null +++ b/skyflow/generated/rest/api/records_api.py @@ -0,0 +1,3310 @@ +# coding: utf-8 + +""" + 
Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
              • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
              • Production: https://*identifier*.vault.skyflowapis.com
              When you make an API call, you need to add a header:
Header: Authorization
Value: A Bearer Token. See API Authentication.
Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
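As an illustration only, a sketch of instantiating this generated client and calling one of the record operations defined later in this file; all identifiers are placeholders, and the default ApiClient is assumed to already be configured with a bearer token:

    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.records_api import RecordsApi

    api_client = ApiClient.get_default()       # assumed to already carry Bearer credentials
    records_api = RecordsApi(api_client)

    # Placeholder identifiers only.
    response = records_api.file_service_delete_file(
        vault_id="<vault-id>",
        table_name="<table-name>",
        id="<skyflow-id>",
        column_name="<file-column>",
    )
    print(response)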
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictBytes, StrictStr, field_validator +from typing import List, Optional, Union +from typing_extensions import Annotated +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class RecordsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def file_service_delete_file( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DeleteFileResponse: + """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. 
(required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_delete_file_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DeleteFileResponse]: + """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_delete_file_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_delete_file_serialize( + self, + vault_id, + table_name, + id, + column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if table_name is not None: + _path_params['tableName'] = table_name + if id is not None: + _path_params['ID'] = id + if column_name is not None: + _path_params['columnName'] = column_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def file_service_get_file_scan_status( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetFileScanStatusResponse: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_get_file_scan_status_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetFileScanStatusResponse]: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_get_file_scan_status_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_get_file_scan_status_serialize( + self, + vault_id, + table_name, + id, + column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if table_name is not None: + _path_params['tableName'] = table_name + if id is not None: + _path_params['ID'] = id + if column_name is not None: + _path_params['columnName'] = column_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}/scan-status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def file_service_upload_file( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1UpdateRecordResponse: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. + :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_upload_file_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1UpdateRecordResponse]: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. + :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_upload_file_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. + :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_upload_file_serialize( + self, + vault_id, + object_name, + id, + file_column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + if file_column_name is not None: + _files['fileColumnName'] = file_column_name + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}/files', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_batch_operation( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BatchOperationResponse: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '207': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_batch_operation_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BatchOperationResponse]: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_batch_operation_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_batch_operation_serialize( + self, + vault_id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_bulk_delete_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BulkDeleteRecordResponse: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_bulk_delete_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BulkDeleteRecordResponse]: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
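+ 
+ Example (illustrative sketch only; `records_api`, `delete_body`, and the placeholder IDs are assumptions, with `delete_body` built by the caller as a RecordServiceBulkDeleteRecordBody)::
+ 
+     api_response = records_api.record_service_bulk_delete_record_with_http_info(
+         vault_id="<VAULT_ID>",
+         object_name="<TABLE_NAME>",
+         body=delete_body,
+     )
+     result = api_response.data  # deserialized V1BulkDeleteRecordResponse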
+ """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_bulk_delete_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_bulk_delete_record_serialize( + self, + vault_id, + object_name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_bulk_get_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BulkGetRecordResponse: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of records to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, URLs are only returned if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide `column_name` or `column_values`, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_bulk_get_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BulkGetRecordResponse]: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of records to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, URLs are only returned if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide `column_name` or `column_values`, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_bulk_get_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of records to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, URLs are only returned if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide `column_name` or `column_values`, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_bulk_get_record_serialize( + self, + vault_id, + object_name, + skyflow_ids, + redaction, + tokenization, + fields, + offset, + limit, + download_url, + column_name, + column_values, + order_by, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'skyflow_ids': 'multi', + 'fields': 'multi', + 'column_values': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + if skyflow_ids is not None: + + _query_params.append(('skyflow_ids', skyflow_ids)) + + if redaction is not None: + + _query_params.append(('redaction', redaction)) + + if tokenization is not None: + + _query_params.append(('tokenization', tokenization)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + if offset is not None: + + _query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if download_url is not None: + + _query_params.append(('downloadURL', download_url)) + + if column_name is not None: + + 
_query_params.append(('column_name', column_name)) + + if column_values is not None: + + _query_params.append(('column_values', column_values)) + + if order_by is not None: + + _query_params.append(('order_by', order_by)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_delete_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DeleteRecordResponse: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_delete_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DeleteRecordResponse]: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_delete_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_delete_record_serialize( + self, + vault_id, + object_name, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_get_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: 
Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1FieldRecords: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_get_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1FieldRecords]: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_get_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_get_record_serialize( + self, + vault_id, + object_name, + id, + redaction, + tokenization, + fields, + download_url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'fields': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + if redaction is not None: + + _query_params.append(('redaction', redaction)) + + if tokenization is not None: + + _query_params.append(('tokenization', tokenization)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + if download_url is not None: + + _query_params.append(('downloadURL', download_url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + 
post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_insert_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1InsertRecordResponse: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_insert_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1InsertRecordResponse]: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_insert_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_insert_record_serialize( + self, + vault_id, + object_name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + 
auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_update_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1UpdateRecordResponse: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_update_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1UpdateRecordResponse]: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_update_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_update_record_serialize( + self, + vault_id, + object_name, + id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + 
post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/tokens_api.py b/skyflow/generated/rest/api/tokens_api.py new file mode 100644 index 00000000..e21e7935 --- /dev/null +++ b/skyflow/generated/rest/api/tokens_api.py @@ -0,0 +1,623 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
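As an illustration of how a caller can satisfy this header with the generated client, here is a hedged sketch. It assumes the generated Configuration (skyflow/generated/rest/configuration.py) accepts `host` and `access_token` keyword arguments, as stock openapi-generator Python clients do; the vault identifier and token are placeholders.

# Hedged sketch: `host` and `access_token` are assumed Configuration kwargs
# (stock openapi-generator layout); substitute your vault identifier and token.
from skyflow.generated.rest.configuration import Configuration
from skyflow.generated.rest.api_client import ApiClient

config = Configuration(
    host="https://<identifier>.vault.skyflowapis.com",  # production base URI
    access_token="<bearer token>",                       # sent as the Authorization header
)
api_client = ApiClient(configuration=config)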
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class TokensApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def record_service_detokenize( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DetokenizeResponse: + """Detokenize + + Returns records that correspond to the specified tokens. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
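For illustration, a hedged usage sketch of record_service_detokenize follows. The call signature and the (connection, read) timeout tuple come from the generated method above; the payload dictionary keys ("detokenizationParameters", "token", "redaction") and the "PLAIN_TEXT" value are assumptions based on the Data API's detokenize contract, not taken from the generated models shown in this diff.

# Hedged sketch of calling record_service_detokenize. Payload keys are
# assumptions; check the generated V1DetokenizePayload model for exact fields.
from skyflow.generated.rest.api_client import ApiClient
from skyflow.generated.rest.api.tokens_api import TokensApi
from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload

tokens_api = TokensApi(ApiClient())  # ApiClient() falls back to Configuration.get_default()
payload = V1DetokenizePayload.from_dict(
    {"detokenizationParameters": [{"token": "<token>", "redaction": "PLAIN_TEXT"}]}
)
response = tokens_api.record_service_detokenize(
    vault_id="<vault_id>",
    detokenize_payload=payload,
    _request_timeout=(3.0, 10.0),  # (connection, read) timeouts
)
print(response)  # deserialized V1DetokenizeResponse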
+ """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '207': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_detokenize_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DetokenizeResponse]: + """Detokenize + + Returns records that correspond to the specified tokens. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_detokenize_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Detokenize + + Returns records that correspond to the specified tokens. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_detokenize_serialize( + self, + vault_id, + detokenize_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if detokenize_payload is not None: + _body_params = detokenize_payload + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/detokenize', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_tokenize( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1TokenizeResponse: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_tokenize_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1TokenizeResponse]: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_tokenize_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_tokenize_serialize( + self, + vault_id, + tokenize_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if tokenize_payload is not None: + _body_params = tokenize_payload + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/tokenize', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git 
a/skyflow/generated/rest/api_client.py b/skyflow/generated/rest/api_client.py new file mode 100644 index 00000000..8aa5e6a9 --- /dev/null +++ b/skyflow/generated/rest/api_client.py @@ -0,0 +1,789 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import datetime +from dateutil.parser import parse +from enum import Enum +import decimal +import json +import mimetypes +import os +import re +import tempfile + +from urllib.parse import quote +from typing import Tuple, Optional, List, Dict, Union +from pydantic import SecretStr + +from skyflow.generated.rest.configuration import Configuration +from skyflow.generated.rest.api_response import ApiResponse, T as ApiResponseT +import skyflow.generated.rest.models +from skyflow.generated.rest import rest +from skyflow.generated.rest.exceptions import ( + ApiValueError, + ApiException, + BadRequestException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException +) + +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] + +class ApiClient: + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, # TODO remove as only py3 is supported? + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'decimal': decimal.Decimal, + 'object': object, + } + _pool = None + + def __init__( + self, + configuration=None, + header_name=None, + header_value=None, + cookie=None + ) -> None: + # use default configuration if none is provided + if configuration is None: + configuration = Configuration.get_default() + self.configuration = configuration + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. 
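To make the default-client pattern above concrete, here is a short sketch: register one configured ApiClient as the process-wide default so that generated API classes constructed without arguments pick it up via get_default(). The Configuration `host` kwarg is an assumption carried over from the stock generator; the vault identifier is a placeholder.

# Sketch: one shared client for all generated API classes. `host` is an
# assumed Configuration kwarg (stock openapi-generator behaviour).
from skyflow.generated.rest.api_client import ApiClient
from skyflow.generated.rest.configuration import Configuration
from skyflow.generated.rest.api.tokens_api import TokensApi

shared = ApiClient(Configuration(host="https://<identifier>.vault.skyflowapis.com"))
ApiClient.set_default(shared)

tokens_api = TokensApi()  # no client passed: uses ApiClient.get_default()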
+ """ + cls._default = default + + def param_serialize( + self, + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None + ) -> RequestSerialized: + + """Builds the HTTP request params needed by the request. + :param method: Method to call. + :param resource_path: Path to method endpoint. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :return: tuple of form (path, http_method, query_params, header_params, + body, post_params, files) + """ + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict( + self.parameters_to_tuples(header_params,collection_formats) + ) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples( + path_params, + collection_formats + ) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples( + post_params, + collection_formats + ) + if files: + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth( + header_params, + query_params, + auth_settings, + resource_path, + method, + body, + request_auth=_request_auth + ) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None or self.configuration.ignore_operation_servers: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + url_query = self.parameters_to_url_query( + query_params, + collection_formats + ) + url += "?" + url_query + + return method, url, header_params, body, post_params + + + def call_api( + self, + method, + url, + header_params=None, + body=None, + post_params=None, + _request_timeout=None + ) -> rest.RESTResponse: + """Makes the HTTP request (synchronous) + :param method: Method to call. + :param url: Path to method endpoint. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. 
+ :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param _request_timeout: timeout setting for this request. + :return: RESTResponse + """ + + try: + # perform request and return response + response_data = self.rest_client.request( + method, url, + headers=header_params, + body=body, post_params=post_params, + _request_timeout=_request_timeout + ) + + except ApiException as e: + raise e + + return response_data + + def response_deserialize( + self, + response_data: rest.RESTResponse, + response_types_map: Optional[Dict[str, ApiResponseT]]=None + ) -> ApiResponse[ApiResponseT]: + """Deserializes response into an object. + :param response_data: RESTResponse object to be deserialized. + :param response_types_map: dict of response types. + :return: ApiResponse + """ + + msg = "RESTResponse.read() must be called before passing it to response_deserialize()" + assert response_data.data is not None, msg + + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. + response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) + + # deserialize response data + response_text = None + return_data = None + try: + if response_type == "bytearray": + return_data = response_data.data + elif response_type == "file": + return_data = self.__deserialize_file(response_data) + elif response_type is not None: + match = None + content_type = response_data.getheader('content-type') + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_text = response_data.data.decode(encoding) + return_data = self.deserialize(response_text, response_type, content_type) + finally: + if not 200 <= response_data.status <= 299: + raise ApiException.from_response( + http_resp=response_data, + body=response_text, + data=return_data, + ) + + return ApiResponse( + status_code = response_data.status, + data = return_data, + headers = response_data.getheaders(), + raw_data = response_data.data + ) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. + If obj is SecretStr, return obj.get_secret_value() + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is decimal.Decimal return string representation. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. 
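A small worked example of the sanitization rules listed above (dates to ISO 8601 strings, Decimal to its string form, containers recursed); the values are arbitrary placeholders.

# Worked example of sanitize_for_serialization: date -> ISO 8601 string,
# Decimal -> string, and containers are sanitized recursively.
import datetime
import decimal
from skyflow.generated.rest.api_client import ApiClient

client = ApiClient()
print(client.sanitize_for_serialization({
    "amount": decimal.Decimal("19.99"),
    "created": datetime.date(2025, 1, 9),
    "tags": ("pii", "card"),
}))
# -> {'amount': '19.99', 'created': '2025-01-09', 'tags': ('pii', 'card')}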
+ """ + if obj is None: + return None + elif isinstance(obj, Enum): + return obj.value + elif isinstance(obj, SecretStr): + return obj.get_secret_value() + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [ + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ] + elif isinstance(obj, tuple): + return tuple( + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + elif isinstance(obj, decimal.Decimal): + return str(obj) + + elif isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')): + obj_dict = obj.to_dict() + else: + obj_dict = obj.__dict__ + + return { + key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items() + } + + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + :param content_type: content type of response. + + :return: deserialized object. + """ + + # fetch data from response object + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif content_type.startswith("application/json"): + if response_text == "": + data = "" + else: + data = json.loads(response_text) + elif content_type.startswith("text/plain"): + data = response_text + else: + raise ApiException( + status=0, + reason="Unsupported content type: {0}".format(content_type) + ) + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith('List['): + m = re.match(r'List\[(.*)]', klass) + assert m is not None, "Malformed List type definition" + sub_kls = m.group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('Dict['): + m = re.match(r'Dict\[([^,]*), (.*)]', klass) + assert m is not None, "Malformed Dict type definition" + sub_kls = m.group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(skyflow.generated.rest.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + elif klass == decimal.Decimal: + return decimal.Decimal(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) + else: + return self.__deserialize_model(data, klass) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. 
+ + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. a=Hello%20World&b=123) + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if isinstance(v, bool): + v = str(v).lower() + if isinstance(v, (int, float)): + v = str(v) + if isinstance(v, dict): + v = json.dumps(v) + + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, str(value)) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(quote(str(value)) for value in v)) + ) + else: + new_params.append((k, quote(str(v)))) + + return "&".join(["=".join(map(str, item)) for item in new_params]) + + def files_parameters(self, files: Dict[str, Union[str, bytes]]): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + for k, v in files.items(): + if isinstance(v, str): + with open(v, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + elif isinstance(v, bytes): + filename = k + filedata = v + else: + raise ValueError("Unsupported file value") + mimetype = ( + mimetypes.guess_type(filename)[0] + or 'application/octet-stream' + ) + params.append( + tuple([k, tuple([filename, filedata, mimetype])]) + ) + return params + + def select_header_accept(self, accepts: List[str]) -> Optional[str]: + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return None + + for accept in accepts: + if re.search('json', accept, re.IGNORECASE): + return accept + + return accepts[0] + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). 
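To make the collection-format handling above concrete, a short example: the 'multi' format repeats the key once per value (as registered for the `fields` query parameter earlier in this diff), while parameters without a collection format are emitted once and URL-quoted.

# Example: 'multi' repeats the key for each value; other params appear once.
from skyflow.generated.rest.api_client import ApiClient

client = ApiClient()
query = client.parameters_to_url_query(
    [("fields", ["name", "email"]), ("redaction", "PLAIN_TEXT")],
    {"fields": "multi"},
)
print(query)  # fields=name&fields=email&redaction=PLAIN_TEXT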
+ """ + if not content_types: + return None + + for content_type in content_types: + if re.search('json', content_type, re.IGNORECASE): + return content_type + + return content_types[0] + + def update_params_for_auth( + self, + headers, + queries, + auth_settings, + resource_path, + method, + body, + request_auth=None + ) -> None: + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param request_auth: if set, the provided settings will + override the token in the configuration. + """ + if not auth_settings: + return + + if request_auth: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + request_auth + ) + else: + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + auth_setting + ) + + def _apply_auth_params( + self, + headers, + queries, + resource_path, + method, + body, + auth_setting + ) -> None: + """Updates the request parameters based on a single auth_setting + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param auth_setting: auth settings for the endpoint + """ + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + queries.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + handle file downloading + save response body into a tmp file and return the instance + + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + m = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition + ) + assert m is not None, "Unexpected 'content-disposition' header value" + filename = m.group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. 
+ """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. + + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as `{1}`" + .format(data, klass) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + + return klass.from_dict(data) diff --git a/skyflow/generated/rest/api_response.py b/skyflow/generated/rest/api_response.py new file mode 100644 index 00000000..9bc7c11f --- /dev/null +++ b/skyflow/generated/rest/api_response.py @@ -0,0 +1,21 @@ +"""API response object.""" + +from __future__ import annotations +from typing import Optional, Generic, Mapping, TypeVar +from pydantic import Field, StrictInt, StrictBytes, BaseModel + +T = TypeVar("T") + +class ApiResponse(BaseModel, Generic[T]): + """ + API response object + """ + + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") + + model_config = { + "arbitrary_types_allowed": True + } diff --git a/skyflow/generated/rest/configuration.py b/skyflow/generated/rest/configuration.py new file mode 100644 index 00000000..5d983650 --- /dev/null +++ b/skyflow/generated/rest/configuration.py @@ -0,0 +1,464 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
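A minimal sketch of the header requirement described above, assuming a hypothetical vault identifier, a hypothetical endpoint path, and an already-issued bearer token; the `requests` library is used purely for illustration and is not a dependency of this SDK.

import requests  # illustration only; not a dependency of this SDK

vault_url = "https://abc123.vault.skyflowapis.com"    # hypothetical vault identifier
headers = {"Authorization": "Bearer <bearer-token>"}  # the header described above

# Hypothetical endpoint path, shown only to illustrate where the header goes.
response = requests.get(f"{vault_url}/v1/vaults", headers=headers)
print(response.status_code)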
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import copy +import logging +from logging import FileHandler +import multiprocessing +import sys +from typing import Optional +import urllib3 + +import http.client as httplib + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', + 'minimum', 'exclusiveMinimum', 'maxLength', + 'minLength', 'pattern', 'maxItems', 'minItems' +} + +class Configuration: + """This class contains various settings of the API client. + + :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. + Config will use `host` as the base url regardless of the operation servers. + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer). + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication. + :param password: Password for HTTP basic authentication. + :param access_token: Access token. + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum + values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format. + :param retries: Number of retries for API requests. + + :Example: + """ + + _default = None + + def __init__(self, host=None, + api_key=None, api_key_prefix=None, + username=None, password=None, + access_token=None, + server_index=None, server_variables=None, + server_operation_index=None, server_operation_variables=None, + ignore_operation_servers=False, + ssl_ca_cert=None, + retries=None, + *, + debug: Optional[bool] = None + ) -> None: + """Constructor + """ + self._base_path = "https://identifier.vault.skyflowapis.com" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. 
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.access_token = access_token + """Access token + """ + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("skyflow.generated.rest") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler: Optional[FileHandler] = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + if debug is not None: + self.debug = debug + else: + self.__debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = ssl_ca_cert + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + self.tls_server_name = None + """SSL/TLS Server Name Indication (SNI) + Set this to the SNI value expected by the server. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. + """ + + self.proxy: Optional[str] = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = retries + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + self.socket_options = None + """Options to pass down to the underlying urllib3 socket + """ + + self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" + """datetime format + """ + + self.date_format = "%Y-%m-%d" + """date format + """ + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = default + + @classmethod + def get_default_copy(cls): + """Deprecated. Please use `get_default` instead. + + Deprecated. Please use `get_default` instead. + + :return: The configuration object. + """ + return cls.get_default() + + @classmethod + def get_default(cls): + """Return the default configuration. 
+ + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration. + + :return: The configuration object. + """ + if cls._default is None: + cls._default = Configuration() + return cls._default + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. 
+ """ + auth = {} + if self.access_token is not None: + auth['Bearer'] = { + 'type': 'bearer', + 'in': 'header', + 'format': 'JWT', + 'key': 'Authorization', + 'value': 'Bearer ' + self.access_token + } + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: v1\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "https://identifier.vault.skyflowapis.com", + 'description': "Production", + }, + { + 'url': "https://identifier.vault.skyflowapis-preview.com", + 'description': "Sandbox", + } + ] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server.get('variables', {}).items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None diff --git a/skyflow/generated/rest/exceptions.py b/skyflow/generated/rest/exceptions.py new file mode 100644 index 00000000..ef323e2e --- /dev/null +++ b/skyflow/generated/rest/exceptions.py @@ -0,0 +1,200 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
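A hedged sketch of wiring the `Configuration` class defined above to one of these base URIs; the identifier and token values are placeholders.

from skyflow.generated.rest.configuration import Configuration

config = Configuration(
    host="https://abc123.vault.skyflowapis.com",  # placeholder identifier; overrides the templated default host
    access_token="<bearer-token>",                # surfaces as the 'Bearer' entry of auth_settings()
)
config.debug = True  # sets package and urllib3 loggers to DEBUG and enables httplib debug output

print(config.host)
print(config.auth_settings()["Bearer"]["value"])  # -> "Bearer <bearer-token>"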
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from typing import Any, Optional +from typing_extensions import Self + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None) -> None: + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiAttributeError(OpenApiException, AttributeError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Raised when an attribute reference or assignment fails. 
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + + if http_resp: + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode('utf-8') + except Exception: + pass + self.headers = http_resp.getheaders() + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + raise ApiException(http_resp=http_resp, body=body, data=data) + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) + + return error_message + + +class BadRequestException(ApiException): + pass + + +class NotFoundException(ApiException): + pass + + +class UnauthorizedException(ApiException): + pass + + +class ForbiddenException(ApiException): + pass + + +class ServiceException(ApiException): + pass + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/skyflow/generated/rest/models/__init__.py b/skyflow/generated/rest/models/__init__.py new file mode 100644 index 00000000..379cf733 --- /dev/null +++ b/skyflow/generated/rest/models/__init__.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +# flake8: noqa +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
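A sketch of handling the typed exceptions defined above; `call_vault` is a hypothetical zero-argument callable standing in for any generated API method.

from skyflow.generated.rest.exceptions import (
    ApiException,
    NotFoundException,
    UnauthorizedException,
)

def fetch(call_vault):
    # call_vault is hypothetical; it wraps whichever generated API method you invoke.
    try:
        return call_vault()
    except UnauthorizedException as e:  # 401 responses, via ApiException.from_response
        print("invalid or expired bearer token:", e.status, e.reason)
    except NotFoundException as e:      # 404 responses
        print("not found:", e.body)
    except ApiException as e:           # any other non-2xx response
        print("vault error:", e.status, e.reason)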
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +# import models into model package +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus +from skyflow.generated.rest.models.protobuf_any import ProtobufAny +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from skyflow.generated.rest.models.request_action_type import RequestActionType +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_card import V1Card +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from 
skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig + +from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest +from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse \ No newline at end of file diff --git a/skyflow/generated/rest/models/audit_event_audit_resource_type.py b/skyflow/generated/rest/models/audit_event_audit_resource_type.py new file mode 100644 index 00000000..c425dce7 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_audit_resource_type.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class AuditEventAuditResourceType(str, Enum): + """ + Type of the resource. + """ + + """ + allowed enum values + """ + NONE_API = 'NONE_API' + ACCOUNT = 'ACCOUNT' + AUDIT = 'AUDIT' + BASE_DATA_TYPE = 'BASE_DATA_TYPE' + FIELD_TEMPLATE = 'FIELD_TEMPLATE' + FILE = 'FILE' + KEY = 'KEY' + POLICY = 'POLICY' + PROTO_PARSE = 'PROTO_PARSE' + RECORD = 'RECORD' + ROLE = 'ROLE' + RULE = 'RULE' + SECRET = 'SECRET' + SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' + TOKEN = 'TOKEN' + USER = 'USER' + VAULT = 'VAULT' + VAULT_TEMPLATE = 'VAULT_TEMPLATE' + WORKSPACE = 'WORKSPACE' + TABLE = 'TABLE' + POLICY_TEMPLATE = 'POLICY_TEMPLATE' + MEMBER = 'MEMBER' + TAG = 'TAG' + CONNECTION = 'CONNECTION' + MIGRATION = 'MIGRATION' + SCHEDULED_JOB = 'SCHEDULED_JOB' + JOB = 'JOB' + COLUMN_NAME = 'COLUMN_NAME' + NETWORK_TOKEN = 'NETWORK_TOKEN' + SUBSCRIPTION = 'SUBSCRIPTION' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of AuditEventAuditResourceType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/audit_event_context.py b/skyflow/generated/rest/models/audit_event_context.py new file mode 100644 index 00000000..af280eb0 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_context.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
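A small usage sketch of the `from_json` helper on the enum above; the value is one of the members listed in the class.

import json
from skyflow.generated.rest.models.audit_event_audit_resource_type import (
    AuditEventAuditResourceType,
)

# from_json expects a JSON-encoded value, i.e. the member name wrapped in quotes.
resource_type = AuditEventAuditResourceType.from_json(json.dumps("VAULT"))
assert resource_type is AuditEventAuditResourceType.VAULT
assert resource_type == "VAULT"  # str-based enum members compare equal to their raw values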
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventContext(BaseModel): + """ + Context for an audit event. + """ # noqa: E501 + change_id: Optional[StrictStr] = Field(default=None, description="ID for the audit event.", alias="changeID") + request_id: Optional[StrictStr] = Field(default=None, description="ID for the request that caused the event.", alias="requestID") + trace_id: Optional[StrictStr] = Field(default=None, description="ID for the request set by the service that received the request.", alias="traceID") + session_id: Optional[StrictStr] = Field(default=None, description="ID for the session in which the request was sent.", alias="sessionID") + actor: Optional[StrictStr] = Field(default=None, description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.") + actor_type: Optional[V1MemberType] = Field(default=V1MemberType.NONE, alias="actorType") + access_type: Optional[ContextAccessType] = Field(default=ContextAccessType.ACCESS_NONE, alias="accessType") + ip_address: Optional[StrictStr] = Field(default=None, description="IP Address of the client that made the request.", alias="ipAddress") + origin: Optional[StrictStr] = Field(default=None, description="HTTP Origin request header (including scheme, hostname, and port) of the request.") + auth_mode: Optional[ContextAuthMode] = Field(default=ContextAuthMode.AUTH_NONE, alias="authMode") + jwt_id: Optional[StrictStr] = Field(default=None, description="ID of the JWT token.", alias="jwtID") + bearer_token_context_id: Optional[StrictStr] = Field(default=None, description="Embedded User Context.", alias="bearerTokenContextID") + __properties: ClassVar[List[str]] = ["changeID", "requestID", "traceID", "sessionID", "actor", "actorType", "accessType", "ipAddress", "origin", "authMode", "jwtID", "bearerTokenContextID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventContext from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventContext from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "changeID": obj.get("changeID"), + "requestID": obj.get("requestID"), + "traceID": obj.get("traceID"), + "sessionID": obj.get("sessionID"), + "actor": obj.get("actor"), + "actorType": obj.get("actorType") if obj.get("actorType") is not None else V1MemberType.NONE, + "accessType": obj.get("accessType") if obj.get("accessType") is not None else ContextAccessType.ACCESS_NONE, + "ipAddress": obj.get("ipAddress"), + "origin": obj.get("origin"), + "authMode": obj.get("authMode") if obj.get("authMode") is not None else ContextAuthMode.AUTH_NONE, + "jwtID": obj.get("jwtID"), + "bearerTokenContextID": obj.get("bearerTokenContextID") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/audit_event_data.py b/skyflow/generated/rest/models/audit_event_data.py new file mode 100644 index 00000000..5a463f00 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_data.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
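A hedged sketch of the alias handling on the `AuditEventContext` model defined above; the ID values are placeholders.

from skyflow.generated.rest.models.audit_event_context import AuditEventContext

# from_dict expects the camelCase wire aliases (changeID, requestID, ...), not snake_case names.
context = AuditEventContext.from_dict({
    "changeID": "chg-123",   # placeholder IDs, for illustration only
    "requestID": "req-456",
    "actor": "sa-demo",
})

print(context.change_id)  # snake_case attribute populated from the alias
print(context.to_dict())  # re-serializes with aliases; fields left as None are omitted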
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventData(BaseModel): + """ + Any Sensitive data that needs to be wrapped. + """ # noqa: E501 + content: Optional[StrictStr] = Field(default=None, description="The entire body of the data requested or the query fired.") + __properties: ClassVar[List[str]] = ["content"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "content": obj.get("content") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/audit_event_http_info.py b/skyflow/generated/rest/models/audit_event_http_info.py new file mode 100644 index 00000000..b3b2f074 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_http_info.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
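A short round-trip sketch using the JSON helpers that the generated models expose, shown here with `AuditEventData` from above; the payload value is hypothetical.

from skyflow.generated.rest.models.audit_event_data import AuditEventData

data = AuditEventData(content='{"field": "value"}')  # hypothetical audit payload
as_json = data.to_json()                             # serializes via to_dict + json.dumps
assert AuditEventData.from_json(as_json) == data     # parses back into an equivalent model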
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventHTTPInfo(BaseModel): + """ + AuditEventHTTPInfo + """ # noqa: E501 + uri: Optional[StrictStr] = Field(default=None, description="The http URI that is used.", alias="URI") + method: Optional[StrictStr] = Field(default=None, description="http method used.") + __properties: ClassVar[List[str]] = ["URI", "method"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventHTTPInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventHTTPInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "URI": obj.get("URI"), + "method": obj.get("method") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/batch_record_method.py b/skyflow/generated/rest/models/batch_record_method.py new file mode 100644 index 00000000..a2892049 --- /dev/null +++ b/skyflow/generated/rest/models/batch_record_method.py @@ -0,0 +1,41 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class BatchRecordMethod(str, Enum): + """ + Method of the operation. + """ + + """ + allowed enum values + """ + NONE = 'NONE' + POST = 'POST' + PUT = 'PUT' + GET = 'GET' + DELETE = 'DELETE' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of BatchRecordMethod from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/context_access_type.py b/skyflow/generated/rest/models/context_access_type.py new file mode 100644 index 00000000..e00a9df9 --- /dev/null +++ b/skyflow/generated/rest/models/context_access_type.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ContextAccessType(str, Enum): + """ + Type of access for the request. + """ + + """ + allowed enum values + """ + ACCESS_NONE = 'ACCESS_NONE' + API = 'API' + SQL = 'SQL' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ContextAccessType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/context_auth_mode.py b/skyflow/generated/rest/models/context_auth_mode.py new file mode 100644 index 00000000..fb803e7a --- /dev/null +++ b/skyflow/generated/rest/models/context_auth_mode.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ContextAuthMode(str, Enum): + """ + Authentication mode the `actor` used. + """ + + """ + allowed enum values + """ + AUTH_NONE = 'AUTH_NONE' + OKTA_JWT = 'OKTA_JWT' + SERVICE_ACCOUNT_JWT = 'SERVICE_ACCOUNT_JWT' + PAT_JWT = 'PAT_JWT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ContextAuthMode from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/detokenize_record_response_value_type.py b/skyflow/generated/rest/models/detokenize_record_response_value_type.py new file mode 100644 index 00000000..62460141 --- /dev/null +++ b/skyflow/generated/rest/models/detokenize_record_response_value_type.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class DetokenizeRecordResponseValueType(str, Enum): + """ + DetokenizeRecordResponseValueType + """ + + """ + allowed enum values + """ + NONE = 'NONE' + STRING = 'STRING' + INTEGER = 'INTEGER' + FLOAT = 'FLOAT' + BOOL = 'BOOL' + DATETIME = 'DATETIME' + JSON = 'JSON' + ARRAY = 'ARRAY' + DATE = 'DATE' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of DetokenizeRecordResponseValueType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/googlerpc_status.py b/skyflow/generated/rest/models/googlerpc_status.py new file mode 100644 index 00000000..b9914c58 --- /dev/null +++ b/skyflow/generated/rest/models/googlerpc_status.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.protobuf_any import ProtobufAny +from typing import Optional, Set +from typing_extensions import Self + +class GooglerpcStatus(BaseModel): + """ + GooglerpcStatus + """ # noqa: E501 + code: Optional[StrictInt] = None + message: Optional[StrictStr] = None + details: Optional[List[ProtobufAny]] = None + __properties: ClassVar[List[str]] = ["code", "message", "details"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GooglerpcStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in details (list) + _items = [] + if self.details: + for _item_details in self.details: + if _item_details: + _items.append(_item_details.to_dict()) + _dict['details'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GooglerpcStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "code": obj.get("code"), + "message": obj.get("message"), + "details": [ProtobufAny.from_dict(_item) for _item in obj["details"]] if obj.get("details") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/protobuf_any.py b/skyflow/generated/rest/models/protobuf_any.py new file mode 100644 index 00000000..e29a6356 --- /dev/null +++ b/skyflow/generated/rest/models/protobuf_any.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header: Authorization
  Value: A Bearer Token. See API Authentication.
  Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
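A hedged sketch of parsing an error body in the google.rpc.Status shape with the `GooglerpcStatus` model defined above; the payload values are invented for illustration.

from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus

status = GooglerpcStatus.from_dict({
    "code": 5,                          # invented example values
    "message": "record not found",
    "details": [{"@type": "type.googleapis.com/google.rpc.ErrorInfo", "reason": "NOT_FOUND"}],
})

print(status.code, status.message)
# Keys beyond @type are kept in ProtobufAny.additional_properties and restored by to_dict().
print(status.details[0].to_dict())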
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ProtobufAny(BaseModel): + """ + ProtobufAny + """ # noqa: E501 + type: Optional[StrictStr] = Field(default=None, alias="@type") + additional_properties: Dict[str, Any] = {} + __properties: ClassVar[List[str]] = ["@type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ProtobufAny from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + * Fields in `self.additional_properties` are added to the output dict. + """ + excluded_fields: Set[str] = set([ + "additional_properties", + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # puts key-value pairs in additional_properties in the top level + if self.additional_properties is not None: + for _key, _value in self.additional_properties.items(): + _dict[_key] = _value + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ProtobufAny from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "@type": obj.get("@type") + }) + # store additional fields in additional_properties + for _key in obj.keys(): + if _key not in cls.__properties: + _obj.additional_properties[_key] = obj.get(_key) + + return _obj + + diff --git a/skyflow/generated/rest/models/query_service_execute_query_body.py b/skyflow/generated/rest/models/query_service_execute_query_body.py new file mode 100644 index 00000000..fa6a9bf9 --- /dev/null +++ b/skyflow/generated/rest/models/query_service_execute_query_body.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class QueryServiceExecuteQueryBody(BaseModel): + """ + QueryServiceExecuteQueryBody + """ # noqa: E501 + query: Optional[StrictStr] = Field(default=None, description="The SQL query to execute.
Supported commands:
  • SELECT
Supported operators:
  • >
  • <
  • =
  • AND
  • OR
  • NOT
  • LIKE
  • ILIKE
  • NULL
  • NOT NULL
Supported keywords:
  • FROM
  • JOIN
  • INNER JOIN
  • LEFT OUTER JOIN
  • LEFT JOIN
  • RIGHT OUTER JOIN
  • RIGHT JOIN
  • FULL OUTER JOIN
  • FULL JOIN
  • OFFSET
  • LIMIT
  • WHERE
Supported functions:
  • AVG()
  • SUM()
  • COUNT()
  • MIN()
  • MAX()
  • REDACTION()
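For example, an illustrative query using only the supported syntax above (the table and column names are hypothetical, not part of this patch): SELECT name, card_number FROM credit_cards WHERE card_type = 'visa' LIMIT 10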
") + __properties: ClassVar[List[str]] = ["query"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of QueryServiceExecuteQueryBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of QueryServiceExecuteQueryBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "query": obj.get("query") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_batch_operation_body.py b/skyflow/generated/rest/models/record_service_batch_operation_body.py new file mode 100644 index 00000000..fe6ef37e --- /dev/null +++ b/skyflow/generated/rest/models/record_service_batch_operation_body.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord +from skyflow.generated.rest.models.v1_byot import V1BYOT +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceBatchOperationBody(BaseModel): + """ + RecordServiceBatchOperationBody + """ # noqa: E501 + records: Optional[List[V1BatchRecord]] = Field(default=None, description="Record operations to perform.") + continue_on_error: Optional[StrictBool] = Field(default=None, description="Continue performing operations on partial errors.", alias="continueOnError") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["records", "continueOnError", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceBatchOperationBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceBatchOperationBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1BatchRecord.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, + "continueOnError": obj.get("continueOnError"), + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py b/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py new file mode 100644 index 00000000..b12f79a8 --- /dev/null +++ b/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceBulkDeleteRecordBody(BaseModel): + """ + RecordServiceBulkDeleteRecordBody + """ # noqa: E501 + skyflow_ids: Optional[List[StrictStr]] = Field(default=None, description="`skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table.") + __properties: ClassVar[List[str]] = ["skyflow_ids"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceBulkDeleteRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceBulkDeleteRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_ids": obj.get("skyflow_ids") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_insert_record_body.py b/skyflow/generated/rest/models/record_service_insert_record_body.py new file mode 100644 index 00000000..c067fe25 --- /dev/null +++ b/skyflow/generated/rest/models/record_service_insert_record_body.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceInsertRecordBody(BaseModel): + """ + RecordServiceInsertRecordBody + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="Record values and tokens.") + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") + upsert: Optional[StrictStr] = Field(default=None, description="Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.
When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.") + homogeneous: Optional[StrictBool] = Field(default=False, description="If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert.") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["records", "tokenization", "upsert", "homogeneous", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceInsertRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceInsertRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, + "tokenization": obj.get("tokenization"), + "upsert": obj.get("upsert"), + "homogeneous": obj.get("homogeneous") if obj.get("homogeneous") is not None else False, + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_update_record_body.py b/skyflow/generated/rest/models/record_service_update_record_body.py new file mode 100644 index 00000000..627a2f6e --- /dev/null +++ b/skyflow/generated/rest/models/record_service_update_record_body.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceUpdateRecordBody(BaseModel): + """ + RecordServiceUpdateRecordBody + """ # noqa: E501 + record: Optional[V1FieldRecords] = None + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["record", "tokenization", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceUpdateRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of record + if self.record: + _dict['record'] = self.record.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceUpdateRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "record": V1FieldRecords.from_dict(obj["record"]) if obj.get("record") is not None else None, + "tokenization": obj.get("tokenization"), + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/redaction_enum_redaction.py b/skyflow/generated/rest/models/redaction_enum_redaction.py new file mode 100644 index 00000000..82f1a16e --- /dev/null +++ b/skyflow/generated/rest/models/redaction_enum_redaction.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class RedactionEnumREDACTION(str, Enum): + """ + Redaction type. Subject to policies assigned to the API caller. When used for detokenization, only supported for vaults that support [column groups](/tokenization-column-groups/). + """ + + """ + allowed enum values + """ + DEFAULT = 'DEFAULT' + REDACTED = 'REDACTED' + MASKED = 'MASKED' + PLAIN_TEXT = 'PLAIN_TEXT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RedactionEnumREDACTION from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/request_action_type.py b/skyflow/generated/rest/models/request_action_type.py new file mode 100644 index 00000000..2137d2eb --- /dev/null +++ b/skyflow/generated/rest/models/request_action_type.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class RequestActionType(str, Enum): + """ + RequestActionType + """ + + """ + allowed enum values + """ + NONE = 'NONE' + ASSIGN = 'ASSIGN' + CREATE = 'CREATE' + DELETE = 'DELETE' + EXECUTE = 'EXECUTE' + LIST = 'LIST' + READ = 'READ' + UNASSIGN = 'UNASSIGN' + UPDATE = 'UPDATE' + VALIDATE = 'VALIDATE' + LOGIN = 'LOGIN' + ROTATE = 'ROTATE' + SCHEDULEROTATION = 'SCHEDULEROTATION' + SCHEDULEROTATIONALERT = 'SCHEDULEROTATIONALERT' + IMPORT = 'IMPORT' + GETIMPORTPARAMETERS = 'GETIMPORTPARAMETERS' + PING = 'PING' + GETCLOUDPROVIDER = 'GETCLOUDPROVIDER' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RequestActionType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_audit_after_options.py b/skyflow/generated/rest/models/v1_audit_after_options.py new file mode 100644 index 00000000..f8c441ef --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_after_options.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditAfterOptions(BaseModel): + """ + V1AuditAfterOptions + """ # noqa: E501 + timestamp: Optional[StrictStr] = Field(default=None, description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.") + change_id: Optional[StrictStr] = Field(default=None, description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.", alias="changeID") + __properties: ClassVar[List[str]] = ["timestamp", "changeID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditAfterOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditAfterOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "changeID": obj.get("changeID") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_event_response.py b/skyflow/generated/rest/models/v1_audit_event_response.py new file mode 100644 index 00000000..bb78dfc8 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_event_response.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditEventResponse(BaseModel): + """ + Contains fields for defining Response Properties. + """ # noqa: E501 + code: Optional[StrictInt] = Field(default=None, description="The status of the overall operation.") + message: Optional[StrictStr] = Field(default=None, description="The status message for the overall operation.") + data: Optional[AuditEventData] = None + timestamp: Optional[StrictStr] = Field(default=None, description="time when this response is generated, use extention method to set it.") + __properties: ClassVar[List[str]] = ["code", "message", "data", "timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditEventResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditEventResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "code": obj.get("code"), + "message": obj.get("message"), + "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, + "timestamp": obj.get("timestamp") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response.py b/skyflow/generated/rest/models/v1_audit_response.py new file mode 100644 index 00000000..06a3d0df --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponse(BaseModel): + """ + V1AuditResponse + """ # noqa: E501 + event: Optional[List[V1AuditResponseEvent]] = Field(default=None, description="Events matching the query.") + next_ops: Optional[V1AuditAfterOptions] = Field(default=None, alias="nextOps") + __properties: ClassVar[List[str]] = ["event", "nextOps"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in event (list) + _items = [] + if self.event: + for _item_event in self.event: + if _item_event: + _items.append(_item_event.to_dict()) + _dict['event'] = _items + # override the default output from pydantic by calling `to_dict()` of next_ops + if self.next_ops: + _dict['nextOps'] = self.next_ops.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "event": [V1AuditResponseEvent.from_dict(_item) for _item in obj["event"]] if obj.get("event") is not None else None, + "nextOps": V1AuditAfterOptions.from_dict(obj["nextOps"]) if obj.get("nextOps") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response_event.py b/skyflow/generated/rest/models/v1_audit_response_event.py new file mode 100644 index 00000000..0edd2a52 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response_event.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. 
*identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponseEvent(BaseModel): + """ + Audit event details. + """ # noqa: E501 + context: Optional[AuditEventContext] = None + request: Optional[V1AuditResponseEventRequest] = None + response: Optional[V1AuditEventResponse] = None + parent_account_id: Optional[StrictStr] = Field(default=None, description="Parent account ID of the account that made the request, if any.", alias="parentAccountID") + account_id: Optional[StrictStr] = Field(default=None, description="ID of the account that made the request.", alias="accountID") + resource_ids: Optional[List[StrictStr]] = Field(default=None, description="IDs for resources involved in the event. Presented in `{resourceType}/{resourceID}` format. For example, `VAULT/cd1d815aa09b4cbfbb803bd20349f202`.", alias="resourceIDs") + __properties: ClassVar[List[str]] = ["context", "request", "response", "parentAccountID", "accountID", "resourceIDs"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponseEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of context + if self.context: + _dict['context'] = self.context.to_dict() + # override the default output from pydantic by calling `to_dict()` of request + if self.request: + _dict['request'] = self.request.to_dict() + # override the default output from pydantic by calling `to_dict()` of response + if self.response: + _dict['response'] = self.response.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponseEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "context": AuditEventContext.from_dict(obj["context"]) if obj.get("context") is not None else None, + "request": V1AuditResponseEventRequest.from_dict(obj["request"]) if obj.get("request") is not None else None, + "response": V1AuditEventResponse.from_dict(obj["response"]) if obj.get("response") is not None else None, + "parentAccountID": obj.get("parentAccountID"), + "accountID": obj.get("accountID"), + "resourceIDs": obj.get("resourceIDs") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response_event_request.py b/skyflow/generated/rest/models/v1_audit_response_event_request.py new file mode 100644 index 00000000..2b4c6546 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response_event_request.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.request_action_type import RequestActionType +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponseEventRequest(BaseModel): + """ + Contains fields for defining Request Properties. + """ # noqa: E501 + data: Optional[AuditEventData] = None + api_name: Optional[StrictStr] = Field(default=None, description="API name.", alias="apiName") + workspace_id: Optional[StrictStr] = Field(default=None, description="The workspaceID (if any) of the request.", alias="workspaceID") + vault_id: Optional[StrictStr] = Field(default=None, description="The vaultID (if any) of the request.", alias="vaultID") + tags: Optional[List[StrictStr]] = Field(default=None, description="Tags associated with the event. To provide better search capabilities. Like login.") + timestamp: Optional[StrictStr] = Field(default=None, description="time when this request is generated, use extention method to set it.") + action_type: Optional[RequestActionType] = Field(default=RequestActionType.NONE, alias="actionType") + resource_type: Optional[AuditEventAuditResourceType] = Field(default=AuditEventAuditResourceType.NONE_API, alias="resourceType") + http_info: Optional[AuditEventHTTPInfo] = Field(default=None, alias="httpInfo") + __properties: ClassVar[List[str]] = ["data", "apiName", "workspaceID", "vaultID", "tags", "timestamp", "actionType", "resourceType", "httpInfo"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponseEventRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + # override the default output from pydantic by calling `to_dict()` of http_info + if self.http_info: + _dict['httpInfo'] = self.http_info.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponseEventRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, + "apiName": obj.get("apiName"), + "workspaceID": obj.get("workspaceID"), + "vaultID": obj.get("vaultID"), + "tags": obj.get("tags"), + "timestamp": obj.get("timestamp"), + "actionType": obj.get("actionType") if obj.get("actionType") is not None else RequestActionType.NONE, + "resourceType": obj.get("resourceType") if obj.get("resourceType") is not None else AuditEventAuditResourceType.NONE_API, + "httpInfo": AuditEventHTTPInfo.from_dict(obj["httpInfo"]) if obj.get("httpInfo") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_batch_operation_response.py b/skyflow/generated/rest/models/v1_batch_operation_response.py new file mode 100644 index 00000000..b790403f --- /dev/null +++ b/skyflow/generated/rest/models/v1_batch_operation_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1BatchOperationResponse(BaseModel): + """ + V1BatchOperationResponse + """ # noqa: E501 + vault_id: Optional[StrictStr] = Field(default=None, description="ID of the vault.", alias="vaultID") + responses: Optional[List[Dict[str, Any]]] = Field(default=None, description="Responses in the same order as in the request. Responses have the same payload structure as their corresponding APIs:
  • `POST` returns an Insert Records response.
  • `PUT` returns an Update Record response.
  • `GET` returns a Get Record response.
  • `DELETE` returns a Delete Record response.
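For example (illustrative only): a batch containing a POST operation followed by a GET operation returns a two-element array whose first element follows the Insert Records response format and whose second follows the Get Record response format.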
") + __properties: ClassVar[List[str]] = ["vaultID", "responses"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BatchOperationResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BatchOperationResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "vaultID": obj.get("vaultID"), + "responses": obj.get("responses") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_batch_record.py b/skyflow/generated/rest/models/v1_batch_record.py new file mode 100644 index 00000000..76480a55 --- /dev/null +++ b/skyflow/generated/rest/models/v1_batch_record.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from typing import Optional, Set +from typing_extensions import Self + +class V1BatchRecord(BaseModel): + """ + V1BatchRecord + """ # noqa: E501 + fields: Optional[Dict[str, Any]] = Field(default=None, description="Field and value key pairs. For example, `{'field_1':'value_1', 'field_2':'value_2'}`. Only valid when `method` is `POST` or `PUT`.") + table_name: Optional[StrictStr] = Field(default=None, description="Name of the table to perform the operation on.", alias="tableName") + method: Optional[BatchRecordMethod] = BatchRecordMethod.NONE + batch_id: Optional[StrictStr] = Field(default=None, description="ID to group operations by. Operations in the same group are executed sequentially.", alias="batchID") + redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.") + id: Optional[StrictStr] = Field(default=None, description="`skyflow_id` for the record. Only valid when `method` is `GET`, `DELETE`, or `PUT`.", alias="ID") + download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") + upsert: Optional[StrictStr] = Field(default=None, description="Column that stores primary keys for upsert operations. The column must be marked as unique in the vault schema. Only valid when `method` is `POST`.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") + __properties: ClassVar[List[str]] = ["fields", "tableName", "method", "batchID", "redaction", "tokenization", "ID", "downloadURL", "upsert", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BatchRecord from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BatchRecord from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "tableName": obj.get("tableName"), + "method": obj.get("method") if obj.get("method") is not None else BatchRecordMethod.NONE, + "batchID": obj.get("batchID"), + "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT, + "tokenization": obj.get("tokenization"), + "ID": obj.get("ID"), + "downloadURL": obj.get("downloadURL"), + "upsert": obj.get("upsert"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bin_list_request.py b/skyflow/generated/rest/models/v1_bin_list_request.py new file mode 100644 index 00000000..71de651e --- /dev/null +++ b/skyflow/generated/rest/models/v1_bin_list_request.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig +from typing import Optional, Set +from typing_extensions import Self + +class V1BINListRequest(BaseModel): + """ + Request to return specific card metadata. + """ # noqa: E501 + fields: Optional[List[StrictStr]] = Field(default=None, description="Fields to return. If not specified, all fields are returned.") + bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") + vault_schema_config: Optional[V1VaultSchemaConfig] = None + skyflow_id: Optional[StrictStr] = Field(default=None, description="skyflow_id of the record.") + __properties: ClassVar[List[str]] = ["fields", "BIN", "vault_schema_config", "skyflow_id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BINListRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of vault_schema_config + if self.vault_schema_config: + _dict['vault_schema_config'] = self.vault_schema_config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BINListRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "BIN": obj.get("BIN"), + "vault_schema_config": V1VaultSchemaConfig.from_dict(obj["vault_schema_config"]) if obj.get("vault_schema_config") is not None else None, + "skyflow_id": obj.get("skyflow_id") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bin_list_response.py b/skyflow/generated/rest/models/v1_bin_list_response.py new file mode 100644 index 00000000..becf8bb4 --- /dev/null +++ b/skyflow/generated/rest/models/v1_bin_list_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add an Authorization header whose value is a Bearer Token (see API Authentication). Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_card import V1Card +from typing import Optional, Set +from typing_extensions import Self + +class V1BINListResponse(BaseModel): + """ + Response to the Get BIN request. + """ # noqa: E501 + cards_data: Optional[List[V1Card]] = Field(default=None, description="Card metadata associated with the specified BIN.") + __properties: ClassVar[List[str]] = ["cards_data"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BINListResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in cards_data (list) + _items = [] + if self.cards_data: + for _item_cards_data in self.cards_data: + if _item_cards_data: + _items.append(_item_cards_data.to_dict()) + _dict['cards_data'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BINListResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cards_data": [V1Card.from_dict(_item) for _item in obj["cards_data"]] if obj.get("cards_data") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bulk_delete_record_response.py b/skyflow/generated/rest/models/v1_bulk_delete_record_response.py new file mode 100644 index 00000000..726e1c40 --- /dev/null +++ b/skyflow/generated/rest/models/v1_bulk_delete_record_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1BulkDeleteRecordResponse(BaseModel): + """ + V1BulkDeleteRecordResponse + """ # noqa: E501 + record_id_response: Optional[List[StrictStr]] = Field(default=None, description="IDs for the deleted records, or `*` if all records were deleted.", alias="RecordIDResponse") + __properties: ClassVar[List[str]] = ["RecordIDResponse"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BulkDeleteRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BulkDeleteRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "RecordIDResponse": obj.get("RecordIDResponse") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bulk_get_record_response.py b/skyflow/generated/rest/models/v1_bulk_get_record_response.py new file mode 100644 index 00000000..df8095df --- /dev/null +++ b/skyflow/generated/rest/models/v1_bulk_get_record_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class V1BulkGetRecordResponse(BaseModel): + """ + V1BulkGetRecordResponse + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="The specified records.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BulkGetRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BulkGetRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_byot.py b/skyflow/generated/rest/models/v1_byot.py new file mode 100644 index 00000000..754a70dc --- /dev/null +++ b/skyflow/generated/rest/models/v1_byot.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1BYOT(str, Enum): + """ + Token insertion behavior. - DISABLE: Tokens aren't allowed for any fields. If tokens are specified, the request fails. - ENABLE: Tokens are allowed—but not required—for all fields. If tokens are specified, they're inserted. - ENABLE_STRICT: Tokens are required for all fields. If tokens are specified, they're inserted. If not, the request fails. + """ + + """ + allowed enum values + """ + DISABLE = 'DISABLE' + ENABLE = 'ENABLE' + ENABLE_STRICT = 'ENABLE_STRICT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1BYOT from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_card.py b/skyflow/generated/rest/models/v1_card.py new file mode 100644 index 00000000..2245ee74 --- /dev/null +++ b/skyflow/generated/rest/models/v1_card.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1Card(BaseModel): + """ + Card metadata of the requested BIN. + """ # noqa: E501 + bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") + issuer_name: Optional[StrictStr] = Field(default=None, description="Name of the card issuer bank.") + country_code: Optional[StrictStr] = Field(default=None, description="Country code of the card.") + currency: Optional[StrictStr] = Field(default=None, description="Currency of the card.") + card_type: Optional[StrictStr] = Field(default=None, description="Type of the card.") + card_category: Optional[StrictStr] = Field(default=None, description="Category of the card.") + card_scheme: Optional[StrictStr] = Field(default=None, description="Scheme of the card.") + card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Last four digits of the card number.") + card_expiry: Optional[StrictStr] = Field(default=None, description="Expiry date of the card.") + __properties: ClassVar[List[str]] = ["BIN", "issuer_name", "country_code", "currency", "card_type", "card_category", "card_scheme", "card_last_four_digits", "card_expiry"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1Card from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1Card from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "BIN": obj.get("BIN"), + "issuer_name": obj.get("issuer_name"), + "country_code": obj.get("country_code"), + "currency": obj.get("currency"), + "card_type": obj.get("card_type"), + "card_category": obj.get("card_category"), + "card_scheme": obj.get("card_scheme"), + "card_last_four_digits": obj.get("card_last_four_digits"), + "card_expiry": obj.get("card_expiry") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_delete_file_response.py b/skyflow/generated/rest/models/v1_delete_file_response.py new file mode 100644 index 00000000..e68030c0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_delete_file_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1DeleteFileResponse(BaseModel): + """ + V1DeleteFileResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the record.") + deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the file was deleted.") + __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DeleteFileResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DeleteFileResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "deleted": obj.get("deleted") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_delete_record_response.py b/skyflow/generated/rest/models/v1_delete_record_response.py new file mode 100644 index 00000000..a56d3ba2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_delete_record_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1DeleteRecordResponse(BaseModel): + """ + V1DeleteRecordResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the deleted record.") + deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the record was deleted.") + __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DeleteRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DeleteRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "deleted": obj.get("deleted") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_payload.py b/skyflow/generated/rest/models/v1_detokenize_payload.py new file mode 100644 index 00000000..0394aa1c --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_payload.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizePayload(BaseModel): + """ + V1DetokenizePayload + """ # noqa: E501 + detokenization_parameters: Optional[List[V1DetokenizeRecordRequest]] = Field(default=None, description="Detokenization details.", alias="detokenizationParameters") + download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") + continue_on_error: Optional[StrictBool] = Field(default=False, description="If `true`, the detokenization request continues even if an error occurs.", alias="continueOnError") + __properties: ClassVar[List[str]] = ["detokenizationParameters", "downloadURL", "continueOnError"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizePayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in detokenization_parameters (list) + _items = [] + if self.detokenization_parameters: + for _item_detokenization_parameters in self.detokenization_parameters: + if _item_detokenization_parameters: + _items.append(_item_detokenization_parameters.to_dict()) + _dict['detokenizationParameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "detokenizationParameters": [V1DetokenizeRecordRequest.from_dict(_item) for _item in obj["detokenizationParameters"]] if obj.get("detokenizationParameters") is not None else None, + "downloadURL": obj.get("downloadURL"), + "continueOnError": obj.get("continueOnError") if obj.get("continueOnError") is not None else False + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_record_request.py b/skyflow/generated/rest/models/v1_detokenize_record_request.py new file mode 100644 index 00000000..2899501b --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_record_request.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeRecordRequest(BaseModel): + """ + V1DetokenizeRecordRequest + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token that identifies the record to detokenize.") + redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT + __properties: ClassVar[List[str]] = ["token", "redaction"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token"), + "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_record_response.py b/skyflow/generated/rest/models/v1_detokenize_record_response.py new file mode 100644 index 00000000..2da5d15d --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_record_response.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeRecordResponse(BaseModel): + """ + V1DetokenizeRecordResponse + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token of the record.") + value_type: Optional[DetokenizeRecordResponseValueType] = Field(default=DetokenizeRecordResponseValueType.NONE, alias="valueType") + value: Optional[StrictStr] = Field(default=None, description="Data corresponding to the token.") + error: Optional[StrictStr] = Field(default=None, description="Error if token isn't found.") + __properties: ClassVar[List[str]] = ["token", "valueType", "value", "error"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token"), + "valueType": obj.get("valueType") if obj.get("valueType") is not None else DetokenizeRecordResponseValueType.NONE, + "value": obj.get("value"), + "error": obj.get("error") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_response.py b/skyflow/generated/rest/models/v1_detokenize_response.py new file mode 100644 index 00000000..34554aa0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeResponse(BaseModel): + """ + V1DetokenizeResponse + """ # noqa: E501 + records: Optional[List[V1DetokenizeRecordResponse]] = Field(default=None, description="Records corresponding to the specified tokens.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1DetokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_field_records.py b/skyflow/generated/rest/models/v1_field_records.py new file mode 100644 index 00000000..913fd6d0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_field_records.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1FieldRecords(BaseModel): + """ + Record values and tokens. + """ # noqa: E501 + fields: Optional[Dict[str, Any]] = Field(default=None, description="Fields and values for the record. For example, `{'field_1':'value_1', 'field_2':'value_2'}`.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") + __properties: ClassVar[List[str]] = ["fields", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1FieldRecords from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1FieldRecords from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_file_av_scan_status.py b/skyflow/generated/rest/models/v1_file_av_scan_status.py new file mode 100644 index 00000000..91479e32 --- /dev/null +++ b/skyflow/generated/rest/models/v1_file_av_scan_status.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1FileAVScanStatus(str, Enum): + """ + Anti-virus scan status of the file. + """ + + """ + allowed enum values + """ + SCAN_NONE = 'SCAN_NONE' + SCAN_CLEAN = 'SCAN_CLEAN' + SCAN_INFECTED = 'SCAN_INFECTED' + SCAN_DELETED = 'SCAN_DELETED' + SCAN_ERROR = 'SCAN_ERROR' + SCAN_PENDING = 'SCAN_PENDING' + SCAN_UNSCANNABLE = 'SCAN_UNSCANNABLE' + SCAN_FILE_NOT_FOUND = 'SCAN_FILE_NOT_FOUND' + SCAN_INVALID = 'SCAN_INVALID' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1FileAVScanStatus from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_get_auth_token_request.py b/skyflow/generated/rest/models/v1_get_auth_token_request.py new file mode 100644 index 00000000..fd5b201f --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_auth_token_request.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
X-SKYFLOW-ACCOUNT-ID | Your Skyflow account ID. | X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
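For illustration only (not part of the generated code): a minimal, hedged sketch of building the auth-token request model defined below. The grant type is the value given in the field description; the signed JWT assertion is a placeholder, and producing or signing it is outside this snippet.

    from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest

    # The assertion must be a user-signed JWT containing iss, key, aud, exp,
    # and sub claims (and optionally ctx), as described for this model.
    request = V1GetAuthTokenRequest(
        grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer",
        assertion="<signed-jwt>",  # placeholder for the user-signed JWT
    )
    print(request.to_json())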
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1GetAuthTokenRequest(BaseModel): + """ + V1GetAuthTokenRequest + """ # noqa: E501 + grant_type: StrictStr = Field(description="Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`.") + assertion: StrictStr = Field(description="User-signed JWT token that contains the following fields:
  • iss: Issuer of the JWT.
  • key: Unique identifier for the key.
  • aud: Recipient the JWT is intended for.
  • exp: Time the JWT expires.
  • sub: Subject of the JWT.
  • ctx: (Optional) Value for Context-aware authorization.
") + subject_token: Optional[StrictStr] = Field(default=None, description="Subject token.") + subject_token_type: Optional[StrictStr] = Field(default=None, description="Subject token type.") + requested_token_use: Optional[StrictStr] = Field(default=None, description="Token use type. Either `delegation` or `impersonation`.") + scope: Optional[StrictStr] = Field(default=None, description="Subset of available roles to associate with the requested token. Uses the format \"role:\\ role:\\\".") + __properties: ClassVar[List[str]] = ["grant_type", "assertion", "subject_token", "subject_token_type", "requested_token_use", "scope"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetAuthTokenRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetAuthTokenRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "grant_type": obj.get("grant_type"), + "assertion": obj.get("assertion"), + "subject_token": obj.get("subject_token"), + "subject_token_type": obj.get("subject_token_type"), + "requested_token_use": obj.get("requested_token_use"), + "scope": obj.get("scope") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_auth_token_response.py b/skyflow/generated/rest/models/v1_get_auth_token_response.py new file mode 100644 index 00000000..c3fccac2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_auth_token_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
X-SKYFLOW-ACCOUNT-ID | Your Skyflow account ID. | X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1GetAuthTokenResponse(BaseModel): + """ + V1GetAuthTokenResponse + """ # noqa: E501 + access_token: Optional[StrictStr] = Field(default=None, description="AccessToken.", alias="accessToken") + token_type: Optional[StrictStr] = Field(default=None, description="TokenType : Bearer.", alias="tokenType") + __properties: ClassVar[List[str]] = ["accessToken", "tokenType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetAuthTokenResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetAuthTokenResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessToken": obj.get("accessToken"), + "tokenType": obj.get("tokenType") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_file_scan_status_response.py b/skyflow/generated/rest/models/v1_get_file_scan_status_response.py new file mode 100644 index 00000000..78d83d19 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_file_scan_status_response.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from typing import Optional, Set +from typing_extensions import Self + +class V1GetFileScanStatusResponse(BaseModel): + """ + V1GetFileScanStatusResponse + """ # noqa: E501 + av_scan_status: Optional[V1FileAVScanStatus] = V1FileAVScanStatus.SCAN_NONE + __properties: ClassVar[List[str]] = ["av_scan_status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetFileScanStatusResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetFileScanStatusResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "av_scan_status": obj.get("av_scan_status") if obj.get("av_scan_status") is not None else V1FileAVScanStatus.NONE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_query_response.py b/skyflow/generated/rest/models/v1_get_query_response.py new file mode 100644 index 00000000..3f7dd870 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_query_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class V1GetQueryResponse(BaseModel): + """ + V1GetQueryResponse + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="Records returned by the query.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetQueryResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetQueryResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_insert_record_response.py b/skyflow/generated/rest/models/v1_insert_record_response.py new file mode 100644 index 00000000..142f1304 --- /dev/null +++ b/skyflow/generated/rest/models/v1_insert_record_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from typing import Optional, Set +from typing_extensions import Self + +class V1InsertRecordResponse(BaseModel): + """ + V1InsertRecordResponse + """ # noqa: E501 + records: Optional[List[V1RecordMetaProperties]] = Field(default=None, description="Identifiers for the inserted records.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1InsertRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1InsertRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1RecordMetaProperties.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_member_type.py b/skyflow/generated/rest/models/v1_member_type.py new file mode 100644 index 00000000..60009732 --- /dev/null +++ b/skyflow/generated/rest/models/v1_member_type.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1MemberType(str, Enum): + """ + Type of the member. + """ + + """ + allowed enum values + """ + NONE = 'NONE' + USER = 'USER' + SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1MemberType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_record_meta_properties.py b/skyflow/generated/rest/models/v1_record_meta_properties.py new file mode 100644 index 00000000..add596f2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_record_meta_properties.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1RecordMetaProperties(BaseModel): + """ + V1RecordMetaProperties + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the inserted record.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") + __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1RecordMetaProperties from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1RecordMetaProperties from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_payload.py b/skyflow/generated/rest/models/v1_tokenize_payload.py new file mode 100644 index 00000000..8a275f2b --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_payload.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizePayload(BaseModel): + """ + V1TokenizePayload + """ # noqa: E501 + tokenization_parameters: Optional[List[V1TokenizeRecordRequest]] = Field(default=None, description="Tokenization details.", alias="tokenizationParameters") + __properties: ClassVar[List[str]] = ["tokenizationParameters"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizePayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tokenization_parameters (list) + _items = [] + if self.tokenization_parameters: + for _item_tokenization_parameters in self.tokenization_parameters: + if _item_tokenization_parameters: + _items.append(_item_tokenization_parameters.to_dict()) + _dict['tokenizationParameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tokenizationParameters": [V1TokenizeRecordRequest.from_dict(_item) for _item in obj["tokenizationParameters"]] if obj.get("tokenizationParameters") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_record_request.py b/skyflow/generated/rest/models/v1_tokenize_record_request.py new file mode 100644 index 00000000..e69e1e93 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_record_request.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeRecordRequest(BaseModel): + """ + V1TokenizeRecordRequest + """ # noqa: E501 + value: Optional[StrictStr] = Field(default=None, description="Existing value to return a token for.") + column_group: Optional[StrictStr] = Field(default=None, description="Name of the column group that the value belongs to.", alias="columnGroup") + __properties: ClassVar[List[str]] = ["value", "columnGroup"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeRecordRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeRecordRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value"), + "columnGroup": obj.get("columnGroup") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_record_response.py b/skyflow/generated/rest/models/v1_tokenize_record_response.py new file mode 100644 index 00000000..24ac8311 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_record_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeRecordResponse(BaseModel): + """ + V1TokenizeRecordResponse + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token corresponding to a value.") + __properties: ClassVar[List[str]] = ["token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_response.py b/skyflow/generated/rest/models/v1_tokenize_response.py new file mode 100644 index 00000000..4847bae5 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeResponse(BaseModel): + """ + V1TokenizeResponse + """ # noqa: E501 + records: Optional[List[V1TokenizeRecordResponse]] = Field(default=None, description="Tokens corresponding to the specified values.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1TokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_update_record_response.py b/skyflow/generated/rest/models/v1_update_record_response.py new file mode 100644 index 00000000..0d66a403 --- /dev/null +++ b/skyflow/generated/rest/models/v1_update_record_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1UpdateRecordResponse(BaseModel): + """ + V1UpdateRecordResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the updated record.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") + __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1UpdateRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1UpdateRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_vault_field_mapping.py b/skyflow/generated/rest/models/v1_vault_field_mapping.py new file mode 100644 index 00000000..b00c92e5 --- /dev/null +++ b/skyflow/generated/rest/models/v1_vault_field_mapping.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1VaultFieldMapping(BaseModel): + """ + Mapping of the fields in the vault to the fields to use for the lookup. + """ # noqa: E501 + card_number: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number.") + card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number suffix.") + card_expiry: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the expiry date.") + __properties: ClassVar[List[str]] = ["card_number", "card_last_four_digits", "card_expiry"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1VaultFieldMapping from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1VaultFieldMapping from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "card_number": obj.get("card_number"), + "card_last_four_digits": obj.get("card_last_four_digits"), + "card_expiry": obj.get("card_expiry") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_vault_schema_config.py b/skyflow/generated/rest/models/v1_vault_schema_config.py new file mode 100644 index 00000000..e57e21ff --- /dev/null +++ b/skyflow/generated/rest/models/v1_vault_schema_config.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from typing import Optional, Set +from typing_extensions import Self + +class V1VaultSchemaConfig(BaseModel): + """ + Details of the vault that stores additional card details. + """ # noqa: E501 + id: Optional[StrictStr] = Field(default=None, description="ID of the vault that stores card details.") + table_name: Optional[StrictStr] = Field(default=None, description="Name of the table that stores card details.") + mapping: Optional[V1VaultFieldMapping] = None + __properties: ClassVar[List[str]] = ["id", "table_name", "mapping"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1VaultSchemaConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of mapping + if self.mapping: + _dict['mapping'] = self.mapping.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1VaultSchemaConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "table_name": obj.get("table_name"), + "mapping": V1VaultFieldMapping.from_dict(obj["mapping"]) if obj.get("mapping") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/py.typed b/skyflow/generated/rest/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/generated/rest/rest.py b/skyflow/generated/rest/rest.py new file mode 100644 index 00000000..1aaefdb3 --- /dev/null +++ b/skyflow/generated/rest/rest.py @@ -0,0 +1,258 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization. Value: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import io +import json +import re +import ssl + +import urllib3 + +from skyflow.generated.rest.exceptions import ApiException, ApiValueError + +SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} +RESTResponseType = urllib3.HTTPResponse + + +def is_socks_proxy_url(url): + if url is None: + return False + split_section = url.split("://") + if len(split_section) < 2: + return False + else: + return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES + + +class RESTResponse(io.IOBase): + + def __init__(self, resp) -> None: + self.response = resp + self.status = resp.status + self.reason = resp.reason + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.data + return self.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.response.headers + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.response.headers.get(name, default) + + +class RESTClientObject: + + def __init__(self, configuration) -> None: + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + pool_args = { + "cert_reqs": cert_reqs, + "ca_certs": configuration.ssl_ca_cert, + "cert_file": configuration.cert_file, + "key_file": configuration.key_file, + } + if configuration.assert_hostname is not None: + pool_args['assert_hostname'] = ( + configuration.assert_hostname + ) + + if configuration.retries is not None: + pool_args['retries'] = configuration.retries + + if configuration.tls_server_name: + pool_args['server_hostname'] = configuration.tls_server_name + + + if configuration.socket_options is not None: + pool_args['socket_options'] = configuration.socket_options + + if configuration.connection_pool_maxsize is not None: + pool_args['maxsize'] = configuration.connection_pool_maxsize + + # https pool manager + self.pool_manager: urllib3.PoolManager + + if configuration.proxy: + if is_socks_proxy_url(configuration.proxy): + from urllib3.contrib.socks import SOCKSProxyManager + pool_args["proxy_url"] = configuration.proxy + pool_args["headers"] = configuration.proxy_headers + self.pool_manager = SOCKSProxyManager(**pool_args) + else: + pool_args["proxy_url"] = configuration.proxy + pool_args["proxy_headers"] = configuration.proxy_headers + self.pool_manager = urllib3.ProxyManager(**pool_args) + else: + self.pool_manager = urllib3.PoolManager(**pool_args) + + def request( + self, + method, + url, + headers=None, + body=None, + post_params=None, + _request_timeout=None + ): + """Perform requests. 
+ + :param method: http request method + :param url: http request url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in [ + 'GET', + 'HEAD', + 'DELETE', + 'POST', + 'PUT', + 'PATCH', + 'OPTIONS' + ] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, float)): + timeout = urllib3.Timeout(total=_request_timeout) + elif ( + isinstance(_request_timeout, tuple) + and len(_request_timeout) == 2 + ): + timeout = urllib3.Timeout( + connect=_request_timeout[0], + read=_request_timeout[1] + ) + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + + # no content type provided or payload is json + content_type = headers.get('Content-Type') + if ( + not content_type + or re.search('json', content_type, re.IGNORECASE) + ): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, + url, + body=request_body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'application/x-www-form-urlencoded': + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=False, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + # Ensures that dict objects are serialized + post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params] + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=True, + timeout=timeout, + headers=headers, + preload_content=False + ) + # Pass a `string` parameter directly in the body to support + # other content types than JSON when `body` argument is + # provided in serialized form. + elif isinstance(body, str) or isinstance(body, bytes): + r = self.pool_manager.request( + method, + url, + body=body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool): + request_body = "true" if body else "false" + r = self.pool_manager.request( + method, + url, + body=request_body, + preload_content=False, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. 
Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request( + method, + url, + fields={}, + timeout=timeout, + headers=headers, + preload_content=False + ) + except urllib3.exceptions.SSLError as e: + msg = "\n".join([type(e).__name__, str(e)]) + raise ApiException(status=0, reason=msg) + + return RESTResponse(r) diff --git a/skyflow/service_account/__init__.py b/skyflow/service_account/__init__.py index dcd8bca9..20d09a8e 100644 --- a/skyflow/service_account/__init__.py +++ b/skyflow/service_account/__init__.py @@ -1,8 +1 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._token import generate_bearer_token -from ._token import generate_bearer_token -from ._token import ResponseToken -from ._token import generate_bearer_token_from_creds -from ._validity import is_expired +from ._utils import generate_bearer_token, generate_bearer_token_from_creds, is_expired, generate_signed_data_tokens, generate_signed_data_tokens_from_creds \ No newline at end of file diff --git a/skyflow/service_account/_token.py b/skyflow/service_account/_token.py deleted file mode 100644 index f73191b1..00000000 --- a/skyflow/service_account/_token.py +++ /dev/null @@ -1,179 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -import jwt -import datetime -import requests -from warnings import warn -from collections import namedtuple -from skyflow._utils import log_info, InterfaceName, InfoMessages, getMetrics - - -from skyflow.errors._skyflow_errors import * - -ResponseToken = namedtuple('ResponseToken', ['AccessToken', 'TokenType']) -interface = InterfaceName.GENERATE_BEARER_TOKEN - - -def generate_bearer_token(credentialsFilePath: str) -> ResponseToken: - ''' - This function is used to get the access token for skyflow Service Accounts. - `credentialsFilePath` is the file path in string of the credentials file that is downloaded after Service Account creation. - - Response Token is a named tupe with two attributes: - 1. AccessToken: The access token - 2. TokenType: The type of access token (eg: Bearer) - ''' - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_TRIGGERED.value, - interface=interface) - - try: - credentialsFile = open(credentialsFilePath, 'r') - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FILE_NOT_FOUND.value % (credentialsFilePath), interface=interface) - - try: - credentials = json.load(credentialsFile) - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FILE_INVALID_JSON.value % (credentialsFilePath), interface=interface) - finally: - credentialsFile.close() - - result = getSAToken(credentials) - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_SUCCESS.value, - interface=interface) - return result - - -def generate_bearer_token_from_creds(credentials: str) -> ResponseToken: - ''' - This function is used to get the access token for skyflow Service Accounts. - `credentials` arg takes the content of the credentials file that is downloaded after Service Account creation. - - Response Token is a named tupe with two attributes: - 1. AccessToken: The access token - 2. 
TokenType: The type of access token (eg: Bearer) - ''' - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_TRIGGERED.value, - interface=interface) - try: - jsonCredentials = json.loads(credentials) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_CREDENTIALS, interface=interface) - result = getSAToken(jsonCredentials) - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_SUCCESS.value, - interface=interface) - return result - - -def getSAToken(credentials): - try: - privateKey = credentials["privateKey"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_PRIVATE_KEY, interface=interface) - try: - clientID = credentials["clientID"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_CLIENT_ID, interface=interface) - try: - keyID = credentials["keyID"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_KEY_ID, interface=interface) - try: - tokenURI = credentials["tokenURI"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_TOKEN_URI, interface=interface) - - signedToken = getSignedJWT(clientID, keyID, tokenURI, privateKey) - - response = sendRequestWithToken(tokenURI, signedToken) - content = response.content.decode('utf-8') - - try: - token = json.loads(content) - except json.decoder.JSONDecodeError as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RESPONSE_NOT_JSON % content, interface=interface) - return getResponseToken(token) - - -def getSignedJWT(clientID, keyID, tokenURI, privateKey): - payload = { - "iss": clientID, - "key": keyID, - "aud": tokenURI, - "sub": clientID, - "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60) - } - try: - return jwt.encode(payload=payload, key=privateKey, algorithm="RS256") - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.JWT_INVALID_FORMAT, interface=interface) - - -def sendRequestWithToken(url, token): - headers = { - "content-type": "application/json", - "sky-metadata": json.dumps(getMetrics()) - } - payload = { - "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", - "assertion": token - } - try: - response = requests.post(url=url, json=payload, headers=headers) - statusCode = response.status_code - except requests.exceptions.InvalidURL: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_URL.value % (url), interface=interface) - except requests.exceptions.MissingSchema: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_URL.value % (url), interface=interface) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if error.response != None and error.response.content != None: - try: - errorResponse = json.loads( - error.response.content.decode('utf-8')) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % error.response.content.decode( - 'utf-8') - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - raise SkyflowError(statusCode, message, interface=interface) - - return response - - -def getResponseToken(token): - try: - accessToken 
= token["accessToken"] - except: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, - SkyflowErrorMessages.MISSING_ACCESS_TOKEN, interface=interface) - - try: - tokenType = token["tokenType"] - except: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, - SkyflowErrorMessages.MISSING_TOKEN_TYPE, interface=interface) - - return ResponseToken(AccessToken=accessToken, TokenType=tokenType) diff --git a/skyflow/service_account/_utils.py b/skyflow/service_account/_utils.py new file mode 100644 index 00000000..78617670 --- /dev/null +++ b/skyflow/service_account/_utils.py @@ -0,0 +1,178 @@ +import json +import datetime +import time +import jwt +from skyflow.error import SkyflowError +from skyflow.generated.rest.models import V1GetAuthTokenRequest +from skyflow.service_account.client.auth_client import AuthClient +from skyflow.utils.logger import log_info, log_error_log +from skyflow.utils import get_base_url, format_scope, SkyflowMessages + + +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def is_expired(token, logger = None): + if len(token) == 0: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_BEARER_TOKEN.value) + return True + + try: + decoded = jwt.decode( + token, options={"verify_signature": False, "verify_aud": False}) + if time.time() >= decoded['exp']: + log_info(SkyflowMessages.Info.BEARER_TOKEN_EXPIRED.value, logger) + log_error_log(SkyflowMessages.ErrorLogs.INVALID_BEARER_TOKEN.value) + return True + return False + except jwt.ExpiredSignatureError: + return True + except Exception: + log_error_log(SkyflowMessages.Error.JWT_DECODE_ERROR.value, logger) + return True + +def generate_bearer_token(credentials_file_path, options = None, logger = None): + try: + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_TRIGGERED.value, logger) + credentials_file =open(credentials_file_path, 'r') + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value, invalid_input_error_code) + + try: + credentials = json.load(credentials_file) + except Exception: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_CREDENTIALS_FILE.value, logger = logger) + raise SkyflowError(SkyflowMessages.Error.FILE_INVALID_JSON.value.format(credentials_file_path), invalid_input_error_code) + + finally: + credentials_file.close() + result = get_service_account_token(credentials, options, logger) + return result + +def generate_bearer_token_from_creds(credentials, options = None, logger = None): + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_TRIGGERED.value, logger) + credentials = credentials.strip() + try: + json_credentials = json.loads(credentials.replace('\n', '\\n')) + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value, invalid_input_error_code) + result = get_service_account_token(json_credentials, options, logger) + return result + +def get_service_account_token(credentials, options, logger): + try: + private_key = credentials["privateKey"] + except: + log_error_log(SkyflowMessages.ErrorLogs.PRIVATE_KEY_IS_REQUIRED.value, logger = logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_PRIVATE_KEY.value, invalid_input_error_code) + try: + client_id = credentials["clientID"] + except: + log_error_log(SkyflowMessages.ErrorLogs.CLIENT_ID_IS_REQUIRED.value, logger=logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_CLIENT_ID.value, invalid_input_error_code) + try: + key_id = credentials["keyID"] + except: + log_error_log(SkyflowMessages.ErrorLogs.KEY_ID_IS_REQUIRED.value, logger=logger) + raise 
SkyflowError(SkyflowMessages.Error.MISSING_KEY_ID.value, invalid_input_error_code) + try: + token_uri = credentials["tokenURI"] + except: + log_error_log(SkyflowMessages.ErrorLogs.TOKEN_URI_IS_REQUIRED.value, logger=logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_TOKEN_URI.value, invalid_input_error_code) + + signed_token = get_signed_jwt(options, client_id, key_id, token_uri, private_key, logger) + base_url = get_base_url(token_uri) + auth_client = AuthClient(base_url) + auth_api = auth_client.get_auth_api() + + formatted_scope = None + if options and "role_ids" in options: + formatted_scope = format_scope(options.get("role_ids")) + + request = V1GetAuthTokenRequest(assertion = signed_token, + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + scope=formatted_scope) + response = auth_api.authentication_service_get_auth_token(request) + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_SUCCESS.value, logger) + return response.access_token, response.token_type + +def get_signed_jwt(options, client_id, key_id, token_uri, private_key, logger): + payload = { + "iss": client_id, + "key": key_id, + "aud": token_uri, + "sub": client_id, + "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60) + } + if options and "ctx" in options: + payload["ctx"] = options.get("ctx") + try: + return jwt.encode(payload=payload, key=private_key, algorithm="RS256") + except Exception: + raise SkyflowError(SkyflowMessages.Error.JWT_INVALID_FORMAT.value, invalid_input_error_code) + + + +def get_signed_tokens(credentials_obj, options): + try: + expiry_time = int(time.time()) + options.get("time_to_live", 60) + prefix = "signed_token_" + + if options and options.get("data_tokens"): + for token in options["data_tokens"]: + claims = { + "iss": "sdk", + "key": credentials_obj.get("keyID"), + "exp": expiry_time, + "sub": credentials_obj.get("clientID"), + "tok": token, + "iat": int(time.time()), + } + + if "ctx" in options: + claims["ctx"] = options["ctx"] + + private_key = credentials_obj.get("privateKey") + signed_jwt = jwt.encode(claims, private_key, algorithm="RS256") + response_object = get_signed_data_token_response_object(prefix + signed_jwt, token) + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKEN_SUCCESS.value) + return response_object + + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS.value, invalid_input_error_code) + + +def generate_signed_data_tokens(credentials_file_path, options): + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKENS_TRIGGERED.value) + try: + credentials_file =open(credentials_file_path, 'r') + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value, invalid_input_error_code) + + try: + credentials = json.load(credentials_file) + except Exception: + raise SkyflowError(SkyflowMessages.Error.FILE_INVALID_JSON.value.format(credentials_file_path), + invalid_input_error_code) + + finally: + credentials_file.close() + + return get_signed_tokens(credentials, options) + +def generate_signed_data_tokens_from_creds(credentials, options): + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKENS_TRIGGERED.value) + credentials = credentials.strip() + try: + json_credentials = json.loads(credentials.replace('\n', '\\n')) + except Exception: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_CREDENTIALS_FILE.value) + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value, invalid_input_error_code) + return get_signed_tokens(json_credentials, options) + +def 
get_signed_data_token_response_object(signed_token, actual_token): + response_object = { + "token": actual_token, + "signed_token": signed_token + } + return response_object.get("token"), response_object.get("signed_token") diff --git a/skyflow/service_account/_validity.py b/skyflow/service_account/_validity.py deleted file mode 100644 index 8b9229ac..00000000 --- a/skyflow/service_account/_validity.py +++ /dev/null @@ -1,33 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from warnings import warn -import jwt -import time -from skyflow.errors._skyflow_errors import * -from skyflow._utils import InterfaceName, log_info, log_error, InfoMessages - - -def is_expired(token: str): - ''' - Check if stored token is not expired, if not return a new token, - if the token has expiry time before 5min of current time, call returns False - ''' - interface = InterfaceName.IS_EXPIRED.value - log_info(InfoMessages.IS_EXPIRED_TRIGGERED.value, interface=interface) - if len(token) == 0: - log_info(InfoMessages.EMPTY_ACCESS_TOKEN, interface=interface) - return True - - try: - decoded = jwt.decode( - token, options={"verify_signature": False, "verify_aud": False}) - if time.time() < decoded['exp']: - return False - except jwt.ExpiredSignatureError: - return True - except Exception as e: - log_error(InfoMessages.INVALID_TOKEN.value, interface=interface) - return True - - return True diff --git a/skyflow/service_account/client/__init__.py b/skyflow/service_account/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/service_account/client/auth_client.py b/skyflow/service_account/client/auth_client.py new file mode 100644 index 00000000..c1cc9cb2 --- /dev/null +++ b/skyflow/service_account/client/auth_client.py @@ -0,0 +1,18 @@ +from skyflow.generated.rest import Configuration, ApiClient +from skyflow.generated.rest.api import AuthenticationApi + + +class AuthClient: + def __init__(self, url): + self.__url = url + self.__client_configuration = self.initialize_client_configuration() + self.__api_client = self.initialize_api_client() + + def initialize_client_configuration(self): + return Configuration(host=self.__url) + + def initialize_api_client(self): + return ApiClient(self.__client_configuration) + + def get_auth_api(self): + return AuthenticationApi(self.__api_client) \ No newline at end of file diff --git a/skyflow/utils/__init__.py b/skyflow/utils/__init__.py new file mode 100644 index 00000000..d5b072f4 --- /dev/null +++ b/skyflow/utils/__init__.py @@ -0,0 +1,5 @@ +from ..utils.enums import LogLevel, Env +from ._skyflow_messages import SkyflowMessages +from ._version import SDK_VERSION +from ._helpers import get_base_url, format_scope +from ._utils import get_credentials, get_vault_url, construct_invoke_connection_request, get_metrics, parse_insert_response, handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_get_response, parse_invoke_connection_response, validate_api_key, encode_column_values diff --git a/skyflow/utils/_helpers.py b/skyflow/utils/_helpers.py new file mode 100644 index 00000000..97eecabc --- /dev/null +++ b/skyflow/utils/_helpers.py @@ -0,0 +1,11 @@ +from urllib.parse import urlparse + +def get_base_url(url): + parsed_url = urlparse(url) + base_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + return base_url + +def format_scope(scopes): + if not scopes: + return None + return " ".join([f"role:{scope}" for scope in scopes]) \ No newline at end of 
file diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py new file mode 100644 index 00000000..954c5e14 --- /dev/null +++ b/skyflow/utils/_skyflow_messages.py @@ -0,0 +1,295 @@ +from enum import Enum + +from ._version import SDK_VERSION + +error_prefix = f"Skyflow Python SDK {SDK_VERSION}" +INFO = "INFO" +ERROR = "ERROR" + +class SkyflowMessages: + class ErrorCodes(Enum): + INVALID_INPUT = 400 + INVALID_INDEX = 404 + SERVER_ERROR = 500 + PARTIAL_SUCCESS = 500 + TOKENS_GET_COLUMN_NOT_SUPPORTED = 400 + REDACTION_WITH_TOKENS_NOT_SUPPORTED = 400 + + class Error(Enum): + EMPTY_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id." + INVALID_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id as a string." + EMPTY_CLUSTER_ID = f"{error_prefix} Initialization failed. Invalid cluster Id for vault with id {{}}. Specify a valid cluster Id." + INVALID_CLUSTER_ID = f"{error_prefix} Initialization failed. Invalid cluster Id for vault with id {{}}. Specify cluster Id as a string." + INVALID_ENV = f"{error_prefix} Initialization failed. Invalid env for vault with id {{}}. Specify a valid env." + INVALID_KEY = f"{error_prefix} Initialization failed. Invalid {{}}. Specify a valid key" + VAULT_ID_NOT_IN_CONFIG_LIST = f"{error_prefix} Validation error. Vault id {{}} is missing from the config. Specify the vault id from configs." + EMPTY_VAULT_CONFIGS = f"{error_prefix} Validation error. Specify at least one vault config." + EMPTY_CONNECTION_CONFIGS = f"{error_prefix} Validation error. Specify at least one connection config." + VAULT_ID_ALREADY_EXISTS =f"{error_prefix} Initialization failed. vault with id {{}} already exists." + CONNECTION_ID_ALREADY_EXISTS = f"{error_prefix} Initialization failed. Connection with id {{}} already exists." + + EMPTY_CREDENTIALS = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Credentials must not be empty." + INVALID_CREDENTIALS_IN_CONFIG = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Specify a valid credentials." + INVALID_CREDENTIALS = f"{error_prefix} Validation error. Invalid credentials. Specify a valid credentials." + MULTIPLE_CREDENTIALS_PASSED_IN_CONFIG = f"{error_prefix} Validation error. Multiple credentials provided for {{}} with id {{}}. Please specify only one valid credential." + MULTIPLE_CREDENTIALS_PASSED = f"{error_prefix} Validation error. Multiple credentials provided. Please specify only one valid credential." + EMPTY_CREDENTIALS_STRING_IN_CONFIG = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Specify valid credentials." + EMPTY_CREDENTIALS_STRING = f"{error_prefix} Validation error. Invalid credentials. Specify valid credentials." + INVALID_CREDENTIALS_STRING_IN_CONFIG = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Specify credentials as a string." + INVALID_CREDENTIALS_STRING = f"{error_prefix} Validation error. Invalid credentials. Specify credentials as a string." + EMPTY_CREDENTIAL_FILE_PATH_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials for {{}} with id {{}}. Specify a valid file path." + EMPTY_CREDENTIAL_FILE_PATH = f"{error_prefix} Initialization failed. Invalid credentials. Specify a valid file path." + INVALID_CREDENTIAL_FILE_PATH_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials for {{}} with id {{}}. Expected file path to be a string." 
+ INVALID_CREDENTIAL_FILE_PATH = f"{error_prefix} Initialization failed. Invalid credentials. Expected file path to be a string." + EMPTY_CREDENTIALS_TOKEN_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid token for {{}} with id {{}}.Specify a valid credentials token." + EMPTY_CREDENTIALS_TOKEN = f"{error_prefix} Initialization failed. Invalid token.Specify a valid credentials token." + INVALID_CREDENTIALS_TOKEN_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials token for {{}} with id {{}}. Expected token to be a string." + INVALID_CREDENTIALS_TOKEN = f"{error_prefix} Initialization failed. Invalid credentials token. Expected token to be a string." + EXPIRED_TOKEN = f"${error_prefix} Initialization failed. Given token is expired. Specify a valid credentials token." + EMPTY_API_KEY_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid api key for {{}} with id {{}}.Specify a valid api key." + EMPTY_API_KEY= f"{error_prefix} Initialization failed. Invalid api key.Specify a valid api key." + INVALID_API_KEY_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid api key for {{}} with id {{}}. Expected api key to be a string." + INVALID_API_KEY = f"{error_prefix} Initialization failed. Invalid api key. Expected api key to be a string." + INVALID_ROLES_KEY_TYPE_IN_CONFIG = f"{error_prefix} Validation error. Invalid roles for {{}} with id {{}}. Specify roles as an array." + INVALID_ROLES_KEY_TYPE = f"{error_prefix} Validation error. Invalid roles. Specify roles as an array." + EMPTY_ROLES_IN_CONFIG = f"{error_prefix} Validation error. Invalid roles for {{}} with id {{}}. Specify at least one role." + EMPTY_ROLES = f"{error_prefix} Validation error. Invalid roles. Specify at least one role." + EMPTY_CONTEXT_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid context provided for {{}} with id {{}}. Specify context as type Context." + EMPTY_CONTEXT = f"{error_prefix} Initialization failed. Invalid context provided. Specify context as type Context." + INVALID_CONTEXT_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid context for {{}} with id {{}}. Specify a valid context." + INVALID_CONTEXT = f"{error_prefix} Initialization failed. Invalid context. Specify a valid context." + INVALID_LOG_LEVEL = f"{error_prefix} Initialization failed. Invalid log level. Specify a valid log level." + EMPTY_LOG_LEVEL = f"{error_prefix} Initialization failed. Specify a valid log level." + + EMPTY_CONNECTION_ID = f"{error_prefix} Initialization failed. Invalid connection Id. Specify a valid connection Id." + INVALID_CONNECTION_ID = f"{error_prefix} Initialization failed. Invalid connection Id. Specify connection Id as a string." + EMPTY_CONNECTION_URL = f"{error_prefix} Initialization failed. Invalid connection Url for connection with id {{}}. Specify a valid connection Url." + INVALID_CONNECTION_URL = f"{error_prefix} Initialization failed. Invalid connection Url for connection with id {{}}. Specify connection Url as a string." + CONNECTION_ID_NOT_IN_CONFIG_LIST = f"{error_prefix} Validation error. {{}} is missing from the config. Specify the connectionIds from config." + RESPONSE_NOT_JSON = f"{error_prefix} Response {{}} is not valid JSON." + API_ERROR = f"{error_prefix} Server returned status code {{}}" + + MISSING_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. Table name cannot be empty in insert request. Specify a table name." + INVALID_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. Invalid table name in insert request. 
Specify a valid table name." + INVALID_TYPE_OF_DATA_IN_INSERT = f"{error_prefix} Validation error. Invalid type of data in insert request. Specify data as a object array." + EMPTY_DATA_IN_INSERT = f"{error_prefix} Validation error. Data array cannot be empty. Specify data in insert request." + INVALID_UPSERT_OPTIONS_TYPE = f"{error_prefix} Validation error. 'upsert' key cannot be empty in options. At least one object of table and column is required." + INVALID_HOMOGENEOUS_TYPE = f"{error_prefix} Validation error. Invalid type of homogeneous. Specify homogeneous as a string." + INVALID_TOKEN_MODE_TYPE = f"{error_prefix} Validation error. Invalid type of token mode. Specify token mode as a TokenMode enum." + INVALID_RETURN_TOKENS_TYPE = f"{error_prefix} Validation error. Invalid type of return tokens. Specify return tokens as a boolean." + INVALID_CONTINUE_ON_ERROR_TYPE = f"{error_prefix} Validation error. Invalid type of continue on error. Specify continue on error as a boolean." + TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE = f"{error_prefix} Validation error. 'token_mode' wasn't specified. Set 'token_mode' to 'ENABLE' to insert tokens." + INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT = f"{error_prefix} Validation error. 'token_mode' is set to 'ENABLE_STRICT', but some fields are missing tokens. Specify tokens for all fields." + NO_TOKENS_IN_INSERT = f"{error_prefix} Validation error. Tokens weren't specified for records while 'token_strict' was {{}}. Specify tokens." + BATCH_INSERT_FAILURE = f"{error_prefix} Insert operation failed." + GET_FAILURE = f"{error_prefix} Get operation failed." + HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT = f"{error_prefix} Validation error. Homogenous is not supported when upsert is passed." + + EMPTY_TABLE_VALUE = f"{error_prefix} Validation error. 'table' can't be empty. Specify a table." + INVALID_TABLE_VALUE = f"{error_prefix} Validation error. Invalid type of table. Specify table as a string" + EMPTY_RECORD_IDS_IN_DELETE = f"{error_prefix} Validation error. 'record ids' array can't be empty. Specify one or more record ids." + BULK_DELETE_FAILURE = f"{error_prefix} Delete operation failed." + + INVALID_QUERY_TYPE = f"{error_prefix} Validation error. Query parameter is of type {{}}. Specify as a string." + EMPTY_QUERY = f"{error_prefix} Validation error. Query parameter can't be empty. Specify as a string." + INVALID_QUERY_COMMAND = f"{error_prefix} Validation error. {{}} command was passed instead, but only SELECT commands are supported. Specify the SELECT command." + SERVER_ERROR = f"{error_prefix} Validation error. Check SkyflowError.data for details." + QUERY_FAILED = f"{error_prefix} Query operation failed." + DETOKENIZE_FIELD = f"{error_prefix} Detokenize operation failed." + UPDATE_FAILED = f"{error_prefix} Update operation failed." + TOKENIZE_FAILED = f"{error_prefix} Tokenize operation failed." + INVOKE_CONNECTION_FAILED = f"{error_prefix} Invoke Connection operation failed." + + INVALID_IDS_TYPE = f"{error_prefix} Validation error. 'ids' has a value of type {{}}. Specify 'ids' as list." + INVALID_REDACTION_TYPE = f"{error_prefix} Validation error. 'redaction' has a value of type {{}}. Specify 'redaction' as type Skyflow.Redaction." + INVALID_COLUMN_NAME = f"{error_prefix} Validation error. 'column' has a value of type {{}}. Specify 'column' as a string." + INVALID_COLUMN_VALUE = f"{error_prefix} Validation error. columnValues key has a value of type {{}}. Specify columnValues key as list." + INVALID_FIELDS_VALUE = f"{error_prefix} Validation error. 
fields key has a value of type {{}}. Specify fields key as list."
+        BOTH_OFFSET_AND_LIMIT_SPECIFIED = f"{error_prefix} Validation error. Both offset and limit cannot be present at the same time."
+        INVALID_OFF_SET_VALUE = f"{error_prefix} Validation error. offset key has a value of type {{}}. Specify offset key as integer."
+        INVALID_LIMIT_VALUE = f"{error_prefix} Validation error. limit key has a value of type {{}}. Specify limit key as integer."
+        INVALID_DOWNLOAD_URL_VALUE = f"{error_prefix} Validation error. download_url key has a value of type {{}}. Specify download_url key as boolean."
+        REDACTION_WITH_TOKENS_NOT_SUPPORTED = f"{error_prefix} Validation error. 'redaction' can't be used when tokens are specified. Remove 'redaction' from payload if tokens are specified."
+        TOKENS_GET_COLUMN_NOT_SUPPORTED = f"{error_prefix} Validation error. Column name and/or column values can't be used when tokens are specified. Remove unique column values or tokens from the payload."
+        BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED = f"{error_prefix} Validation error. Both Skyflow IDs and column details can't be specified. Either specify Skyflow IDs or unique column details."
+        INVALID_ORDER_BY_VALUE = f"{error_prefix} Validation error. order_by key has a value of type {{}}. Specify order_by key as Skyflow.OrderBy."
+
+        UPDATE_FIELD_KEY_ERROR = f"{error_prefix} Validation error. Fields are empty in an update payload. Specify at least one field."
+        INVALID_FIELDS_TYPE = f"{error_prefix} Validation error. The 'data' key has a value of type {{}}. Specify 'data' as a dictionary."
+        IDS_KEY_ERROR = f"{error_prefix} Validation error. 'ids' key is missing from the payload. Specify an 'ids' key."
+        INVALID_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. The 'tokens' key has a value of type {{}}. Specify 'tokens' as a list."
+        EMPTY_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. Tokens are empty in detokenize payload. Specify at least one token."
+
+        INVALID_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. The 'values' key has a value of type {{}}. Specify 'values' as a list."
+        EMPTY_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. Tokenize values are empty in tokenize payload. Specify at least one parameter."
+        INVALID_TOKENIZE_PARAMETER = f"{error_prefix} Validation error. Tokenize value at index {{}} has a value of type {{}}. Specify as a dictionary."
+        EMPTY_TOKENIZE_PARAMETER_VALUE = f"{error_prefix} Validation error. Tokenize value at index {{}} is empty. Specify a valid value."
+        EMPTY_TOKENIZE_PARAMETER_COLUMN_GROUP = f"{error_prefix} Validation error. Tokenize column group at index {{}} is empty. Specify a valid column group."
+        INVALID_TOKENIZE_PARAMETER_KEY = f"{error_prefix} Validation error. Tokenize value key at index {{}} is invalid. Specify a valid key value."
+
+        INVALID_REQUEST_BODY = f"{error_prefix} Validation error. Invalid request body. Specify the request body as an object."
+        INVALID_REQUEST_HEADERS = f"{error_prefix} Validation error. Invalid request headers. Specify the request headers as an object."
+        INVALID_URL = f"{error_prefix} Validation error. Connection url {{}} is invalid. Specify a valid connection url."
+        INVALID_PATH_PARAMS = f"{error_prefix} Validation error. Path parameters aren't valid. Specify valid path parameters."
+        INVALID_QUERY_PARAMS = f"{error_prefix} Validation error. Query parameters aren't valid. Specify valid query parameters."
+        INVALID_REQUEST_METHOD = f"{error_prefix} Validation error. Invalid request method.
Specify the request method as enum RequestMethod" + + MISSING_PRIVATE_KEY = f"{error_prefix} Initialization failed. Unable to read private key in credentials. Verify your private key." + MISSING_CLIENT_ID = f"{error_prefix} Initialization failed. Unable to read client ID in credentials. Verify your client ID." + MISSING_KEY_ID = f"{error_prefix} Initialization failed. Unable to read key ID in credentials. Verify your key ID." + MISSING_TOKEN_URI = f"{error_prefix} Initialization failed. Unable to read token URI in credentials. Verify your token URI." + JWT_INVALID_FORMAT = f"{error_prefix} Initialization failed. Invalid private key format. Verify your credentials." + JWT_DECODE_ERROR = f"{error_prefix} Validation error. Invalid access token. Verify your credentials." + FILE_INVALID_JSON = f"{error_prefix} Initialization failed. File at {{}} is not in valid JSON format. Verify the file contents." + INVALID_JSON_FORMAT_IN_CREDENTIALS_ENV = f"{error_prefix} Validation error. Invalid JSON format in SKYFLOW_CREDENTIALS environment variable." + + class Info(Enum): + CLIENT_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized skyflow client." + VALIDATING_VAULT_CONFIG = f"{INFO}: [{error_prefix}] Validating vault config." + VALIDATING_CONNECTION_CONFIG = f"{INFO}: [{error_prefix}] Validating connection config." + UNABLE_TO_GENERATE_SDK_METRIC = f"{INFO}: [{error_prefix}] Unable to generate {{}} metric." + VAULT_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized vault controller with vault ID {{}}." + CONNECTION_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized connection controller with connection ID {{}}." + VAULT_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} already exists." + VAULT_CONFIG_DOES_NOT_EXIST = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} doesn't exist." + CONNECTION_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Connection config with connection ID {{}} already exists." + CONNECTION_CONFIG_DOES_NOT_EXIST = f"{INFO}: [{error_prefix}] Connection config with connection ID {{}} doesn't exist." + LOGGER_SETUP_DONE = f"{INFO}: [{error_prefix}] Set up logger." + CURRENT_LOG_LEVEL = f"{INFO}: [{error_prefix}] Current log level is {{}}." + + BEARER_TOKEN_EXPIRED = f"{INFO}: [{error_prefix}] Bearer token is expired." + GET_BEARER_TOKEN_TRIGGERED = f"{INFO}: [{error_prefix}] generate_bearer_token method triggered." + GET_BEARER_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Bearer token generated." + GET_SIGNED_DATA_TOKENS_TRIGGERED = f"{INFO}: [{error_prefix}] generate_signed_data_tokens method triggered." + GET_SIGNED_DATA_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Signed data tokens generated." + GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED = f"{INFO}: [{error_prefix}] generate bearer_token_from_credential_string method triggered ." + REUSE_BEARER_TOKEN = f"{INFO}: [{error_prefix}] Reusing bearer token." + + + VALIDATE_INSERT_REQUEST = f"{INFO}: [{error_prefix}] Validating insert request." + INSERT_TRIGGERED = f"{INFO}: [{error_prefix}] Insert method triggered." + INSERT_SUCCESS = f"{INFO}: [{error_prefix}] Data inserted." + INSERT_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Insert request resolved." + + VALIDATE_UPDATE_REQUEST = f"{INFO}: [{error_prefix}] Validating update request." + UPDATE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Update request resolved." + UPDATE_SUCCESS = f"{INFO}: [{error_prefix}] Data updated." + UPDATE_TRIGGERED = f"{INFO}: [{error_prefix}] Update method triggered." 
+ + DELETE_TRIGGERED = f"{INFO}: [{error_prefix}] Delete method triggered." + VALIDATING_DELETE_REQUEST = f"{INFO}: [{error_prefix}] Validating delete request." + DELETE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Delete request resolved." + DELETE_SUCCESS = f"{INFO}: [{error_prefix}] Data deleted." + + GET_TRIGGERED = f"{INFO}: [{error_prefix}] Get method triggered." + VALIDATE_GET_REQUEST = f"{INFO}: [{error_prefix}] Validating get request." + GET_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Get request resolved." + GET_SUCCESS = f"{INFO}: [{error_prefix}] Data revealed." + + QUERY_TRIGGERED = f"{INFO}: [{error_prefix}] Query method triggered." + VALIDATING_QUERY_REQUEST = f"{INFO}: [{error_prefix}] Validating query request." + QUERY_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Query request resolved." + QUERY_SUCCESS = f"{INFO}: [{error_prefix}] Query executed." + + DETOKENIZE_TRIGGERED = f"{INFO}: [{error_prefix}] Detokenize method triggered." + VALIDATE_DETOKENIZE_REQUEST = f"{INFO}: [{error_prefix}] Validating detokenize request." + DETOKENIZE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Detokenize request resolved." + DETOKENIZE_SUCCESS = f"{INFO}: [{error_prefix}] Data detokenized." + + TOKENIZE_TRIGGERED = f"{INFO}: [{error_prefix}] Tokenize method triggered." + VALIDATING_TOKENIZE_REQUEST = f"{INFO}: [{error_prefix}] Validating tokenize request." + TOKENIZE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Tokenize request resolved." + TOKENIZE_SUCCESS = f"{INFO}: [{error_prefix}] Data tokenized." + + INVOKE_CONNECTION_TRIGGERED = f"{INFO}: [{error_prefix}] Invoke connection method triggered." + VALIDATING_INVOKE_CONNECTION_REQUEST = f"{INFO}: [{error_prefix}] Validating invoke connection request." + INVOKE_CONNECTION_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Invoke connection request resolved." + INVOKE_CONNECTION_SUCCESS = f"{INFO}: [{error_prefix}] Invoke Connection Success." + + class ErrorLogs(Enum): + VAULTID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID is required." + EMPTY_VAULTID = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID can not be empty." + CLUSTER_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Cluster ID is required." + EMPTY_CLUSTER_ID = f"{ERROR}: [{error_prefix}] Invalid vault config. Cluster ID can not be empty." + ENV_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Env is required." + CONNECTION_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection ID is required." + EMPTY_CONNECTION_ID = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection ID can not be empty." + CONNECTION_URL_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL is required." + EMPTY_CONNECTION_URL = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL can not be empty." + INVALID_CONNECTION_URL = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL is not a valid URL." + EMPTY_CREDENTIALS_PATH = f"{ERROR}: [{error_prefix}] Invalid credentials. Credentials path can not be empty." + EMPTY_CREDENTIALS_STRING = f"{ERROR}: [{error_prefix}] Invalid credentials. Credentials string can not be empty." + EMPTY_TOKEN_VALUE = f"{ERROR}: [{error_prefix}] Invalid credentials. Token can not be empty." + EMPTY_API_KEY_VALUE = f"{ERROR}: [{error_prefix}] Invalid credentials. Api key can not be empty." + INVALID_API_KEY = f"{ERROR}: [{error_prefix}] Invalid credentials. Api key is invalid." 
+ + INVALID_BEARER_TOKEN = f"{ERROR}: [{error_prefix}] Bearer token is invalid or expired." + INVALID_CREDENTIALS_FILE = f"{ERROR}: [{error_prefix}] Credentials file is either null or an invalid file." + INVALID_CREDENTIALS_STRING_FORMAT = f"{ERROR}: [{error_prefix}] Credentials string in not in a valid JSON string format." + PRIVATE_KEY_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Private key is required." + CLIENT_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Client ID is required." + KEY_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Key ID is required." + TOKEN_URI_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Token URI is required." + INVALID_TOKEN_URI = f"{ERROR}: [{error_prefix}] Invalid value for token URI in credentials." + + + TABLE_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Table is required." + EMPTY_TABLE_NAME =f"{ERROR}: [{error_prefix}] Invalid {{}} request. Table name can not be empty." + VALUES_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Values are required." + EMPTY_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Values can not be empty." + EMPTY_OR_NULL_VALUE_IN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Value can not be null or empty in values for key {{}}." + EMPTY_OR_NULL_KEY_IN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Key can not be null or empty in values." + EMPTY_UPSERT = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Upsert can not be empty." + HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Homogenous is not supported when upsert is passed." + EMPTY_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens can not be empty." + EMPTY_OR_NULL_VALUE_IN_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Value can not be null or empty in tokens for key {{}}." + EMPTY_OR_NULL_KEY_IN_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Key can not be null or empty in tokens." + MISMATCH_OF_FIELDS_AND_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Keys for values and tokens are not matching." + + EMPTY_IDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Ids can not be empty." + EMPTY_OR_NULL_ID_IN_IDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Id can not be null or empty in ids at index {{}}." + TOKENIZATION_NOT_SUPPORTED_WITH_REDACTION= f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokenization is not supported when redaction is applied." + TOKENIZATION_SUPPORTED_ONLY_WITH_IDS=f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokenization is not supported when column name and values are passed." + TOKENS_NOT_ALLOWED_WITH_BYOT_DISABLE = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens are not allowed when token_strict is DISABLE." + INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT =f"{ERROR}: [{error_prefix}] Invalid {{}} request. For tokenStrict as ENABLE_STRICT, tokens should be passed for all fields." + TOKENS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens are required." + EMPTY_FIELDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Fields can not be empty." + EMPTY_OFFSET = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Offset ca not be empty." + NEITHER_IDS_NOR_COLUMN_NAME_PASSED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Neither ids nor column name and values are passed." + BOTH_IDS_AND_COLUMN_NAME_PASSED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Both ids and column name and values are passed." 
+ COLUMN_NAME_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column name is required when column values are passed." + COLUMN_VALUES_IS_REQUIRED_GET = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column values are required when column name is passed." + SKYFLOW_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Skyflow Id is required." + EMPTY_SKYFLOW_ID = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Skyflow Id can not be empty." + + COLUMN_VALUES_IS_REQUIRED_TOKENIZE = f"{ERROR}: [{error_prefix}] Invalid {{}} request. ColumnValues are required." + EMPTY_COLUMN_GROUP_IN_COLUMN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column group can not be null or empty in column values at index %s2." + + EMPTY_QUERY= f"{ERROR}: [{error_prefix}] Invalid {{}} request. Query can not be empty." + QUERY_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Query is required." + + INSERT_RECORDS_REJECTED = f"{ERROR}: [{error_prefix}] Insert call resulted in failure." + DETOKENIZE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Detokenize request resulted in failure." + DELETE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Delete request resulted in failure." + TOKENIZE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Tokenize request resulted in failure." + UPDATE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Update request resulted in failure." + QUERY_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Query request resulted in failure." + GET_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Get request resulted in failure." + + class Interface(Enum): + INSERT = "INSERT" + GET = "GET" + QUERY = "QUERY" + DETOKENIZE = " DETOKENIZE" + TOKENIZE = "TOKENIZE" + UPDATE = "UPDATE" + DELETE = "DELETE" + + class HttpStatus(Enum): + BAD_REQUEST = "Bad Request" + + class Warning(Enum): + WARNING_MESSAGE = "WARNING MESSAGE" + + + diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py new file mode 100644 index 00000000..5002956a --- /dev/null +++ b/skyflow/utils/_utils.py @@ -0,0 +1,392 @@ +import os +import json +import urllib.parse +from dotenv import load_dotenv +import dotenv +from requests.sessions import PreparedRequest +from requests.models import HTTPError +import requests +import platform +import sys +import re +from urllib.parse import quote +from skyflow.error import SkyflowError +from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ + V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse +from skyflow.utils.logger import log_error, log_error_log +from . 
import SkyflowMessages, SDK_VERSION +from .enums import Env, ContentType, EnvUrls +from skyflow.vault.data import InsertResponse, UpdateResponse, DeleteResponse, QueryResponse, GetResponse +from .validations import validate_invoke_connection_params +from ..vault.connection import InvokeConnectionResponse +from ..vault.tokens import DetokenizeResponse, TokenizeResponse + +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def get_credentials(config_level_creds = None, common_skyflow_creds = None, logger = None): + dotenv.load_dotenv() + dotenv_path = dotenv.find_dotenv(usecwd=True) + if dotenv_path: + load_dotenv(dotenv_path) + env_skyflow_credentials = os.getenv("SKYFLOW_CREDENTIALS") + if config_level_creds: + return config_level_creds + if common_skyflow_creds: + return common_skyflow_creds + if env_skyflow_credentials: + env_skyflow_credentials.strip() + try: + env_creds = env_skyflow_credentials.replace('\n', '\\n') + return { + 'credentials_string': env_creds + } + except json.JSONDecodeError: + raise SkyflowError(SkyflowMessages.Error.INVALID_JSON_FORMAT_IN_CREDENTIALS_ENV.value, invalid_input_error_code) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS.value, invalid_input_error_code) + +def validate_api_key(api_key: str, logger = None) -> bool: + if len(api_key) != 42: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_API_KEY.value, logger = logger) + return False + api_key_pattern = re.compile(r'^sky-[a-zA-Z0-9]{5}-[a-fA-F0-9]{32}$') + + return bool(api_key_pattern.match(api_key)) + +def get_vault_url(cluster_id, env,vault_id, logger = None): + if not cluster_id or not isinstance(cluster_id, str) or not cluster_id.strip(): + raise SkyflowError(SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id), invalid_input_error_code) + + if env not in Env: + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + base_url = EnvUrls[env.name].value + protocol = "https" if env != Env.PROD else "http" + + return f"{protocol}://{cluster_id}.{base_url}" + +def parse_path_params(url, path_params): + result = url + for param, value in path_params.items(): + result = result.replace('{' + param + '}', value) + + return result + +def to_lowercase_keys(dict): + result = {} + for key, value in dict.items(): + result[key.lower()] = value + + return result + +def construct_invoke_connection_request(request, connection_url, logger) -> PreparedRequest: + url = parse_path_params(connection_url.rstrip('/'), request.path_params) + + try: + if isinstance(request.headers, dict): + header = to_lowercase_keys(json.loads( + json.dumps(request.headers))) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value, invalid_input_error_code) + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value, invalid_input_error_code) + + if not 'Content-Type'.lower() in header: + header['content-type'] = ContentType.JSON.value + + try: + if isinstance(request.body, dict): + json_data, files = get_data_from_content_type( + request.body, header["content-type"] + ) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_BODY.value, invalid_input_error_code) + except Exception as e: + raise SkyflowError( SkyflowMessages.Error.INVALID_REQUEST_BODY.value, invalid_input_error_code) + + validate_invoke_connection_params(logger, request.query_params, request.path_params) + + if not hasattr(request.method, 'value'): + raise 
SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_METHOD.value, invalid_input_error_code) + + try: + return requests.Request( + method = request.method.value, + url = url, + data = json_data, + headers = header, + params = request.query_params, + files = files + ).prepare() + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_URL.value.format(connection_url), invalid_input_error_code) + + +def http_build_query(data): + return urllib.parse.urlencode(r_urlencode(list(), dict(), data)) + +def r_urlencode(parents, pairs, data): + if isinstance(data, list) or isinstance(data, tuple): + for i in range(len(data)): + parents.append(i) + r_urlencode(parents, pairs, data[i]) + parents.pop() + elif isinstance(data, dict): + for key, value in data.items(): + parents.append(key) + r_urlencode(parents, pairs, value) + parents.pop() + else: + pairs[render_key(parents)] = str(data) + + return pairs + +def render_key(parents): + depth, out_str = 0, '' + for x in parents: + s = "[%s]" if depth > 0 or isinstance(x, int) else "%s" + out_str += s % str(x) + depth += 1 + return out_str + +def get_data_from_content_type(data, content_type): + converted_data = data + files = {} + if content_type == ContentType.URLENCODED.value: + converted_data = http_build_query(data) + elif content_type == ContentType.FORMDATA.value: + converted_data = r_urlencode(list(), dict(), data) + files = {(None, None)} + elif content_type == ContentType.JSON.value: + converted_data = json.dumps(data) + + return converted_data, files + + +def get_metrics(): + sdk_name_version = "skyflow-python@" + SDK_VERSION + + try: + sdk_client_device_model = platform.node() + except Exception: + sdk_client_device_model = "" + + try: + sdk_client_os_details = sys.platform + except Exception: + sdk_client_os_details = "" + + try: + sdk_runtime_details = sys.version + except Exception: + sdk_runtime_details = "" + + details_dic = { + 'sdk_name_version': sdk_name_version, + 'sdk_client_device_model': sdk_client_device_model, + 'sdk_client_os_details': sdk_client_os_details, + 'sdk_runtime_details': "Python " + sdk_runtime_details, + } + return details_dic + + +def parse_insert_response(api_response, continue_on_error): + inserted_fields = [] + errors = [] + insert_response = InsertResponse() + if continue_on_error: + for idx, response in enumerate(api_response.responses): + if response['Status'] == 200: + body = response['Body'] + if 'records' in body: + for record in body['records']: + inserted_field = { + 'skyflow_id': record['skyflow_id'], + 'request_index': idx + } + + if 'tokens' in record: + inserted_field.update(record['tokens']) + inserted_fields.append(inserted_field) + elif response['Status'] == 400: + error = { + 'request_index': idx, + 'error': response['Body']['error'] + } + errors.append(error) + + insert_response.inserted_fields = inserted_fields + insert_response.errors = errors + + else: + for record in api_response.records: + field_data = { + 'skyflow_id': record.skyflow_id + } + + if record.tokens: + field_data.update(record.tokens) + + inserted_fields.append(field_data) + insert_response.inserted_fields = inserted_fields + + return insert_response + +def parse_update_record_response(api_response: V1UpdateRecordResponse): + update_response = UpdateResponse() + updated_field = dict() + updated_field['skyflow_id'] = api_response.skyflow_id + if api_response.tokens is not None: + updated_field.update(api_response.tokens) + + update_response.updated_field = updated_field + + return update_response + +def 
parse_delete_response(api_response: V1BulkDeleteRecordResponse): + delete_response = DeleteResponse() + deleted_ids = api_response.record_id_response + delete_response.deleted_ids = deleted_ids + delete_response.errors = [] + return delete_response + + +def parse_get_response(api_response: V1BulkGetRecordResponse): + get_response = GetResponse() + data = [] + errors = [] + for record in api_response.records: + field_data = {field: value for field, value in record.fields.items()} + data.append(field_data) + + get_response.data = data + get_response.errors = errors + + return get_response + +def parse_detokenize_response(api_response: V1DetokenizeResponse): + detokenized_fields = [] + errors = [] + + for record in api_response.records: + if record.error: + errors.append({ + "token": record.token, + "error": record.error + }) + else: + value_type = record.value_type.value if record.value_type else None + detokenized_fields.append({ + "token": record.token, + "value": record.value, + "type": value_type + }) + + detokenized_fields = detokenized_fields + errors = errors + detokenize_response = DetokenizeResponse() + detokenize_response.detokenized_fields = detokenized_fields + detokenize_response.errors = errors + + return detokenize_response + +def parse_tokenize_response(api_response: V1TokenizeResponse): + tokenize_response = TokenizeResponse() + tokenized_fields = [{"token": record.token} for record in api_response.records] + + tokenize_response.tokenized_fields = tokenized_fields + + return tokenize_response + +def parse_query_response(api_response: V1GetQueryResponse): + query_response = QueryResponse() + fields = [] + for record in api_response.records: + field_object = { + **record.fields, + "tokenized_data": {} + } + fields.append(field_object) + query_response.fields = fields + return query_response + +def parse_invoke_connection_response(api_response: requests.Response): + invoke_connection_response = InvokeConnectionResponse() + + status_code = api_response.status_code + content = api_response.content + if isinstance(content, bytes): + content = content.decode('utf-8') + try: + api_response.raise_for_status() + try: + json_content = json.loads(content) + if 'x-request-id' in api_response.headers: + request_id = api_response.headers['x-request-id'] + json_content['request_id'] = request_id + + invoke_connection_response.response = json_content + return invoke_connection_response + except: + raise SkyflowError(SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content), status_code) + except HTTPError: + message = SkyflowMessages.Error.API_ERROR.value.format(status_code) + if api_response and api_response.content: + try: + error_response = json.loads(content) + if isinstance(error_response.get('error'), dict) and 'message' in error_response['error']: + message = error_response['error']['message'] + except json.JSONDecodeError: + message = SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content) + + if 'x-request-id' in api_response.headers: + message += ' - request id: ' + api_response.headers['x-request-id'] + + raise SkyflowError(message, status_code) + + +def log_and_reject_error(description, status_code, request_id, http_status=None, grpc_code=None, details=None, logger = None): + raise SkyflowError(description, status_code, request_id, grpc_code, http_status, details) + +def handle_exception(error, logger): + request_id = error.headers.get('x-request-id', 'unknown-request-id') + content_type = error.headers.get('content-type') + data = error.body + + if content_type: + if 
'application/json' in content_type: + handle_json_error(error, data, request_id, logger) + elif 'text/plain' in content_type: + handle_text_error(error, data, request_id, logger) + else: + handle_generic_error(error, request_id, logger) + else: + handle_generic_error(error, request_id, logger) + +def handle_json_error(err, data, request_id, logger): + try: + description = json.loads(data) + status_code = description.get('error', {}).get('http_code', 500) # Default to 500 if not found + http_status = description.get('error', {}).get('http_status') + grpc_code = description.get('error', {}).get('grpc_code') + details = description.get('error', {}).get('details') + + description_message = description.get('error', {}).get('message', "An unknown error occurred.") + log_and_reject_error(description_message, status_code, request_id, http_status, grpc_code, details, logger = logger) + except json.JSONDecodeError: + log_and_reject_error("Invalid JSON response received.", err, request_id, logger = logger) + +def handle_text_error(err, data, request_id, logger): + log_and_reject_error(data, err.status, request_id, logger = logger) + +def handle_generic_error(err, request_id, logger): + description = "An error occurred." + log_and_reject_error(description, err.status, request_id, logger = logger) + + +def encode_column_values(get_request): + encoded_column_values = list() + for column in get_request.column_values: + encoded_column_values.append(quote(column)) + + return encoded_column_values diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py new file mode 100644 index 00000000..112e619d --- /dev/null +++ b/skyflow/utils/_version.py @@ -0,0 +1 @@ +SDK_VERSION = '1.15.1.dev0+9eff324' \ No newline at end of file diff --git a/skyflow/utils/enums/__init__.py b/skyflow/utils/enums/__init__.py new file mode 100644 index 00000000..5456737b --- /dev/null +++ b/skyflow/utils/enums/__init__.py @@ -0,0 +1,6 @@ +from .env import Env, EnvUrls +from .log_level import LogLevel +from .content_types import ContentType +from .token_mode import TokenMode +from .request_method import RequestMethod +from .redaction_type import RedactionType \ No newline at end of file diff --git a/skyflow/utils/enums/content_types.py b/skyflow/utils/enums/content_types.py new file mode 100644 index 00000000..362c286a --- /dev/null +++ b/skyflow/utils/enums/content_types.py @@ -0,0 +1,8 @@ +from enum import Enum + +class ContentType(Enum): + JSON = 'application/json' + PLAINTEXT = 'text/plain' + XML = 'text/xml' + URLENCODED = 'application/x-www-form-urlencoded' + FORMDATA = 'multipart/form-data' \ No newline at end of file diff --git a/skyflow/utils/enums/env.py b/skyflow/utils/enums/env.py new file mode 100644 index 00000000..862f8f8a --- /dev/null +++ b/skyflow/utils/enums/env.py @@ -0,0 +1,13 @@ +from enum import Enum + +class Env(Enum): + DEV = 'DEV', + SANDBOX = 'SANDBOX', + PROD = 'PROD' + STAGE = 'STAGE' + +class EnvUrls(Enum): + PROD = "vault.skyflowapis.com", + SANDBOX = "vault.skyflowapis-preview.com", + DEV = "vault.skyflowapis.dev" + STAGE = "vault.skyflowapis.tech" \ No newline at end of file diff --git a/skyflow/utils/enums/log_level.py b/skyflow/utils/enums/log_level.py new file mode 100644 index 00000000..c92e9149 --- /dev/null +++ b/skyflow/utils/enums/log_level.py @@ -0,0 +1,8 @@ +from enum import Enum + +class LogLevel(Enum): + DEBUG = 1 + INFO = 2 + WARN = 3 + ERROR = 4 + OFF = 5 diff --git a/skyflow/utils/enums/redaction_type.py b/skyflow/utils/enums/redaction_type.py new file mode 100644 index 
00000000..85310048 --- /dev/null +++ b/skyflow/utils/enums/redaction_type.py @@ -0,0 +1,8 @@ +from enum import Enum +from skyflow.generated.rest import RedactionEnumREDACTION + +class RedactionType(Enum): + PLAIN_TEXT = RedactionEnumREDACTION.PLAIN_TEXT + MASKED = RedactionEnumREDACTION.MASKED + DEFAULT = RedactionEnumREDACTION.DEFAULT + REDACTED = RedactionEnumREDACTION.REDACTED diff --git a/skyflow/utils/enums/request_method.py b/skyflow/utils/enums/request_method.py new file mode 100644 index 00000000..61efef3d --- /dev/null +++ b/skyflow/utils/enums/request_method.py @@ -0,0 +1,8 @@ +from enum import Enum + +class RequestMethod(Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + NONE = "NONE" \ No newline at end of file diff --git a/skyflow/utils/enums/token_mode.py b/skyflow/utils/enums/token_mode.py new file mode 100644 index 00000000..650f9a96 --- /dev/null +++ b/skyflow/utils/enums/token_mode.py @@ -0,0 +1,7 @@ +from enum import Enum +from skyflow.generated.rest import V1BYOT + +class TokenMode(Enum): + DISABLE = V1BYOT.DISABLE + ENABLE = V1BYOT.ENABLE + ENABLE_STRICT = V1BYOT.ENABLE_STRICT \ No newline at end of file diff --git a/skyflow/utils/logger/__init__.py b/skyflow/utils/logger/__init__.py new file mode 100644 index 00000000..2993b8fc --- /dev/null +++ b/skyflow/utils/logger/__init__.py @@ -0,0 +1,2 @@ +from ._logger import Logger +from ._log_helpers import log_error, log_info, log_error_log \ No newline at end of file diff --git a/skyflow/utils/logger/_log_helpers.py b/skyflow/utils/logger/_log_helpers.py new file mode 100644 index 00000000..fdb11ea9 --- /dev/null +++ b/skyflow/utils/logger/_log_helpers.py @@ -0,0 +1,34 @@ +from ..enums import LogLevel +from . import Logger + + +def log_info(message, logger = None): + if not logger: + logger = Logger(LogLevel.INFO) + + logger.info(message) + +def log_error_log(message, logger=None): + if not logger: + logger = Logger(LogLevel.ERROR) + logger.error(message) + +def log_error(message, http_code, request_id=None, grpc_code=None, http_status=None, details=None, logger=None): + if not logger: + logger = Logger(LogLevel.ERROR) + + log_data = { + 'http_code': http_code, + 'message': message + } + + if grpc_code is not None: + log_data['grpc_code'] = grpc_code + if http_status is not None: + log_data['http_status'] = http_status + if request_id is not None: + log_data['request_id'] = request_id + if details is not None: + log_data['details'] = details + + logger.error(log_data) \ No newline at end of file diff --git a/skyflow/utils/logger/_logger.py b/skyflow/utils/logger/_logger.py new file mode 100644 index 00000000..45519fb1 --- /dev/null +++ b/skyflow/utils/logger/_logger.py @@ -0,0 +1,50 @@ +import logging +from ..enums.log_level import LogLevel + + +class Logger: + def __init__(self, level=LogLevel.ERROR): + self.current_level = level + self.logger = logging.getLogger('skyflow-python') + self.logger.propagate = False # Prevent logs from being handled by parent loggers + + # Remove any existing handlers to avoid duplicates or inherited handlers + if self.logger.hasHandlers(): + self.logger.handlers.clear() + + self.set_log_level(level) + + handler = logging.StreamHandler() + + # Create a formatter that only includes the message without any prefixes + formatter = logging.Formatter('%(message)s') + handler.setFormatter(formatter) + + self.logger.addHandler(handler) + + def set_log_level(self, level): + self.current_level = level + log_level_mapping = { + LogLevel.DEBUG: logging.DEBUG, + LogLevel.INFO: 
logging.INFO, + LogLevel.WARN: logging.WARNING, + LogLevel.ERROR: logging.ERROR, + LogLevel.OFF: logging.CRITICAL + 1 + } + self.logger.setLevel(log_level_mapping[level]) + + def debug(self, message): + if self.current_level.value <= LogLevel.DEBUG.value: + self.logger.debug(message) + + def info(self, message): + if self.current_level.value <= LogLevel.INFO.value: + self.logger.info(message) + + def warn(self, message): + if self.current_level.value <= LogLevel.WARN.value: + self.logger.warning(message) + + def error(self, message): + if self.current_level.value <= LogLevel.ERROR.value: + self.logger.error(message) diff --git a/skyflow/utils/validations/__init__.py b/skyflow/utils/validations/__init__.py new file mode 100644 index 00000000..17bc49a7 --- /dev/null +++ b/skyflow/utils/validations/__init__.py @@ -0,0 +1,16 @@ +from ._validations import ( + validate_vault_config, + validate_insert_request, + validate_connection_config, + validate_update_vault_config, + validate_update_connection_config, + validate_credentials, + validate_log_level, + validate_delete_request, + validate_query_request, + validate_get_request, + validate_update_request, + validate_detokenize_request, + validate_tokenize_request, + validate_invoke_connection_params, +) \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py new file mode 100644 index 00000000..c3026e75 --- /dev/null +++ b/skyflow/utils/validations/_validations.py @@ -0,0 +1,561 @@ +import json +from skyflow.service_account import is_expired +from skyflow.utils.enums import LogLevel, Env, RedactionType, TokenMode +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.logger import log_info, log_error_log + +valid_vault_config_keys = ["vault_id", "cluster_id", "credentials", "env"] +valid_connection_config_keys = ["connection_id", "connection_url", "credentials"] +valid_credentials_keys = ["path", "roles", "context", "token", "credentials_string"] +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def validate_required_field(logger, config, field_name, expected_type, empty_error, invalid_error): + field_value = config.get(field_name) + + if field_name not in config or not isinstance(field_value, expected_type): + if field_name == "vault_id": + logger.error(SkyflowMessages.ErrorLogs.VAULTID_IS_REQUIRED.value) + if field_name == "cluster_id": + logger.error(SkyflowMessages.ErrorLogs.CLUSTER_ID_IS_REQUIRED.value) + if field_name == "connection_id": + logger.error(SkyflowMessages.ErrorLogs.CONNECTION_ID_IS_REQUIRED.value) + if field_name == "connection_url": + logger.error(SkyflowMessages.ErrorLogs.INVALID_CONNECTION_URL.value) + raise SkyflowError(invalid_error, invalid_input_error_code) + + if isinstance(field_value, str) and not field_value.strip(): + if field_name == "vault_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_VAULTID.value) + if field_name == "cluster_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CLUSTER_ID.value) + if field_name == "connection_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CONNECTION_ID.value) + if field_name == "connection_url": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CONNECTION_URL.value) + if field_name == "path": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CREDENTIALS_PATH.value) + if field_name == "credentials_string": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CREDENTIALS_STRING.value) + if field_name == "token": + 
logger.error(SkyflowMessages.ErrorLogs.EMPTY_TOKEN_VALUE.value) + if field_name == "api_key": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_API_KEY_VALUE.value) + raise SkyflowError(empty_error, invalid_input_error_code) + +def validate_api_key(api_key: str, logger = None) -> bool: + if not api_key.startswith('sky-'): + log_error_log(SkyflowMessages.ErrorLogs.INVALID_API_KEY.value, logger=logger) + return False + + if len(api_key) != 42: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_API_KEY.value, logger = logger) + return False + + return True + +def validate_credentials(logger, credentials, config_id_type=None, config_id=None): + key_present = [k for k in ["path", "token", "credentials_string", "api_key"] if credentials.get(k)] + + if len(key_present) == 0: + error_message = ( + SkyflowMessages.Error.INVALID_CREDENTIALS_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else + SkyflowMessages.Error.INVALID_CREDENTIALS.value + ) + raise SkyflowError(error_message, invalid_input_error_code) + elif len(key_present) > 1: + error_message = ( + SkyflowMessages.Error.MULTIPLE_CREDENTIALS_PASSED_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else + SkyflowMessages.Error.MULTIPLE_CREDENTIALS_PASSED.value + ) + raise SkyflowError(error_message, invalid_input_error_code) + + if "roles" in credentials: + validate_required_field( + logger, credentials, "roles", list, + SkyflowMessages.Error.INVALID_ROLES_KEY_TYPE_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_ROLES_KEY_TYPE.value, + SkyflowMessages.Error.EMPTY_ROLES_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_ROLES.value + ) + + if "context" in credentials: + validate_required_field( + logger, credentials, "context", str, + SkyflowMessages.Error.EMPTY_CONTEXT_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CONTEXT.value, + SkyflowMessages.Error.INVALID_CONTEXT_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CONTEXT.value + ) + + if "credentials_string" in credentials: + validate_required_field( + logger, credentials, "credentials_string", str, + SkyflowMessages.Error.EMPTY_CREDENTIALS_STRING_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIALS_STRING.value, + SkyflowMessages.Error.INVALID_CREDENTIALS_STRING_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value + ) + elif "path" in credentials: + validate_required_field( + logger, credentials, "path", str, + SkyflowMessages.Error.EMPTY_CREDENTIAL_FILE_PATH_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIAL_FILE_PATH.value, + SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value + ) + elif "token" in credentials: + validate_required_field( + logger, credentials, "token", str, + SkyflowMessages.Error.EMPTY_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIALS_TOKEN.value, + 
SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value + ) + if is_expired(credentials.get("token"), logger): + raise SkyflowError( + SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value, + invalid_input_error_code + ) + elif "api_key" in credentials: + validate_required_field( + logger, credentials, "api_key", str, + SkyflowMessages.Error.EMPTY_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_API_KEY.value, + SkyflowMessages.Error.INVALID_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_API_KEY.value + ) + if not validate_api_key(credentials.get("api_key"), logger): + raise SkyflowError(SkyflowMessages.Error.INVALID_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_API_KEY.value, + invalid_input_error_code) + +def validate_log_level(logger, log_level): + if not isinstance(log_level, LogLevel): + raise SkyflowError( SkyflowMessages.Error.INVALID_LOG_LEVEL.value, invalid_input_error_code) + + if log_level is None: + raise SkyflowError(SkyflowMessages.Error.EMPTY_LOG_LEVEL.value, invalid_input_error_code) + +def validate_keys(logger, config, config_keys): + for key in config.keys(): + if key not in config_keys: + raise SkyflowError(SkyflowMessages.Error.INVALID_KEY.value.format(key), invalid_input_error_code) + +def validate_vault_config(logger, config): + log_info(SkyflowMessages.Info.VALIDATING_VAULT_CONFIG.value, logger) + validate_keys(logger, config, valid_vault_config_keys) + + # Validate vault_id (string, not empty) + validate_required_field( + logger, config, "vault_id", str, + SkyflowMessages.Error.EMPTY_VAULT_ID.value, + SkyflowMessages.Error.INVALID_VAULT_ID.value + ) + vault_id = config.get("vault_id") + # Validate cluster_id (string, not empty) + validate_required_field( + logger, config, "cluster_id", str, + SkyflowMessages.Error.EMPTY_CLUSTER_ID.value.format(vault_id), + SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id) + ) + + # Validate credentials (dict, not empty) + if "credentials" in config and not config.get("credentials"): + raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("vault", vault_id), invalid_input_error_code) + + if "credentials" in config and config.get("credentials"): + validate_credentials(logger, config.get("credentials"), "vault", vault_id) + + # Validate env (optional, should be one of LogLevel values) + if "env" in config and config.get("env") not in Env: + logger.error(SkyflowMessages.ErrorLogs.VAULTID_IS_REQUIRED.value) + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + return True + +def validate_update_vault_config(logger, config): + + validate_keys(logger, config, valid_vault_config_keys) + + # Validate vault_id (string, not empty) + validate_required_field( + logger, config, "vault_id", str, + SkyflowMessages.Error.EMPTY_VAULT_ID.value, + SkyflowMessages.Error.INVALID_VAULT_ID.value + ) + + vault_id = config.get("vault_id") + + if "cluster_id" in config and not config.get("cluster_id"): + raise SkyflowError(SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id), invalid_input_error_code) + + if 
"env" in config and config.get("env") not in Env: + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + if "credentials" not in config: + raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("vault", vault_id), invalid_input_error_code) + + validate_credentials(logger, config.get("credentials"), "vault", vault_id) + + return True + +def validate_connection_config(logger, config): + log_info(SkyflowMessages.Info.VALIDATING_CONNECTION_CONFIG.value, logger) + validate_keys(logger, config, valid_connection_config_keys) + + validate_required_field( + logger, config, "connection_id" , str, + SkyflowMessages.Error.EMPTY_CONNECTION_ID.value, + SkyflowMessages.Error.INVALID_CONNECTION_ID.value + ) + + connection_id = config.get("connection_id") + + validate_required_field( + logger, config, "connection_url", str, + SkyflowMessages.Error.EMPTY_CONNECTION_URL.value.format(connection_id), + SkyflowMessages.Error.INVALID_CONNECTION_URL.value.format(connection_id) + ) + + if "credentials" not in config: + raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("connection", connection_id), invalid_input_error_code) + + validate_credentials(logger, config.get("credentials"), "connection", connection_id) + + return True + +def validate_update_connection_config(logger, config): + + validate_keys(logger, config, valid_connection_config_keys) + + validate_required_field( + logger, config, "connection_id", str, + SkyflowMessages.Error.EMPTY_CONNECTION_ID.value, + SkyflowMessages.Error.INVALID_CONNECTION_ID.value + ) + + connection_id = config.get("connection_id") + + validate_required_field( + logger, config, "connection_url", str, + SkyflowMessages.Error.EMPTY_CONNECTION_URL.value.format(connection_id), + SkyflowMessages.Error.INVALID_CONNECTION_URL.value.format(connection_id) + ) + + if "credentials" not in config: + raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("connection", connection_id), invalid_input_error_code) + validate_credentials(logger, config.get("credentials")) + + return True + + +def validate_insert_request(logger, request): + if not isinstance(request.table_name, str): + log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("INSERT"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_NAME_IN_INSERT.value, invalid_input_error_code) + if not request.table_name.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("INSERT"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_TABLE_NAME_IN_INSERT.value, invalid_input_error_code) + + if not isinstance(request.values, list) or not all(isinstance(v, dict) for v in request.values): + log_error_log(SkyflowMessages.ErrorLogs.VALUES_IS_REQUIRED.value.format("INSERT"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code) + + if not len(request.values): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_VALUES.value.format("INSERT"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_DATA_IN_INSERT.value, invalid_input_error_code) + + for i, item in enumerate(request.values, start=1): + for key, value in item.items(): + if key is None or key == "": + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_KEY_IN_VALUES.value.format("INSERT"), logger = logger) + + if value is None or value == "": + 
log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_VALUE_IN_VALUES.value.format("INSERT", key), logger = logger)
+
+    if request.upsert is not None and (not isinstance(request.upsert, str) or not request.upsert.strip()):
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_UPSERT.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_UPSERT_OPTIONS_TYPE.value, invalid_input_error_code)
+
+    if request.homogeneous is not None and not isinstance(request.homogeneous, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_HOMOGENEOUS_TYPE.value, invalid_input_error_code)
+
+    if request.upsert and request.homogeneous:
+        log_error_log(SkyflowMessages.ErrorLogs.HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT.value.format("INSERT"), invalid_input_error_code)
+
+    if request.token_mode is not None:
+        if not isinstance(request.token_mode, TokenMode):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_MODE_TYPE.value, invalid_input_error_code)
+
+    if not isinstance(request.return_tokens, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code)
+
+    if not isinstance(request.continue_on_error, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value, invalid_input_error_code)
+
+    if request.tokens:
+        # Validate the shape of tokens before inspecting individual entries.
+        if not isinstance(request.tokens, list) or not request.tokens or not all(isinstance(t, dict) for t in request.tokens):
+            log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("INSERT"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code)
+        for i, item in enumerate(request.tokens, start=1):
+            for key, value in item.items():
+                if key is None or key == "":
+                    log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_KEY_IN_TOKENS.value.format("INSERT"), logger=logger)
+
+                if value is None or value == "":
+                    log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_VALUE_IN_TOKENS.value.format("INSERT", key), logger=logger)
+
+    if request.token_mode == TokenMode.ENABLE and not request.tokens:
+        raise SkyflowError(SkyflowMessages.Error.NO_TOKENS_IN_INSERT.value.format(request.token_mode), invalid_input_error_code)
+
+    if request.token_mode == TokenMode.DISABLE and request.tokens:
+        raise SkyflowError(SkyflowMessages.Error.TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE.value, invalid_input_error_code)
+
+    if request.token_mode == TokenMode.ENABLE_STRICT:
+        if len(request.values) != len(request.tokens):
+            log_error_log(SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("INSERT"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, invalid_input_error_code)
+
+        for v, t in zip(request.values, request.tokens):
+            if set(v.keys()) != set(t.keys()):
+                log_error_log(SkyflowMessages.ErrorLogs.MISMATCH_OF_FIELDS_AND_TOKENS.value.format("INSERT"), logger=logger)
+                raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, invalid_input_error_code)
+
+def validate_delete_request(logger, request):
+    if not isinstance(request.table, str):
+        log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("DELETE"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code)
+    if not request.table.strip():
log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("DELETE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code) + + if not request.ids: + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_IDS.value.format("DELETE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_RECORD_IDS_IN_DELETE.value, invalid_input_error_code) + +def validate_query_request(logger, request): + if not request.query: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_IS_REQUIRED.value.format("QUERY"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_QUERY.value, invalid_input_error_code) + + if not isinstance(request.query, str): + query_type = str(type(request.query)) + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_TYPE.value.format(query_type), invalid_input_error_code) + + if not request.query.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_QUERY.value.format("QUERY"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_QUERY.value, invalid_input_error_code) + + if not request.query.upper().startswith("SELECT"): + command = request.query + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_COMMAND.value.format(command), invalid_input_error_code) + +def validate_get_request(logger, request): + redaction_type = request.redaction_type + column_name = request.column_name + column_values = request.column_values + skyflow_ids = request.ids + fields = request.fields + offset = request.offset + limit = request.limit + download_url = request.download_url + + if not isinstance(request.table, str): + log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("GET"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code) + if not request.table.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("GET"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code) + + if not skyflow_ids and not column_name and not column_values: + log_error_log(SkyflowMessages.ErrorLogs.NEITHER_IDS_NOR_COLUMN_NAME_PASSED.value.format("GET"), logger = logger) + + if skyflow_ids and (not isinstance(skyflow_ids, list) or not skyflow_ids): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_IDS.value.format("GET"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_IDS_TYPE.value.format(type(skyflow_ids)), invalid_input_error_code) + + if skyflow_ids: + for index, skyflow_id in enumerate(skyflow_ids): + if skyflow_id is None or skyflow_id == "": + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_ID_IN_IDS.value.format("GET", index), + logger=logger) + + if not isinstance(request.return_tokens, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code) + + if redaction_type is not None and not isinstance(redaction_type, RedactionType): + raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(redaction_type)), invalid_input_error_code) + + if fields is not None and (not isinstance(fields, list) or not fields): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_FIELDS.value.format("GET"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_FIELDS_VALUE.value.format(type(fields)), invalid_input_error_code) + + if offset is not None and limit is not None: + raise SkyflowError( + SkyflowMessages.Error.BOTH_OFFSET_AND_LIMIT_SPECIFIED.value, + invalid_input_error_code) + + 
if offset is not None and not isinstance(offset, str): + raise SkyflowError(SkyflowMessages.Error.INVALID_OFF_SET_VALUE.value.format(type(offset)), invalid_input_error_code) + + if limit is not None and not isinstance(limit, str): + raise SkyflowError(SkyflowMessages.Error.INVALID_LIMIT_VALUE.value.format(type(limit)), invalid_input_error_code) + + if download_url is not None and not isinstance(download_url, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_DOWNLOAD_URL_VALUE.value.format(type(download_url)), invalid_input_error_code) + + if column_name is not None and (not isinstance(column_name, str) or not column_name.strip()): + raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code) + + if column_values is not None and ( + not isinstance(column_values, list) or not column_values or not all( + isinstance(val, str) for val in column_values)): + raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_VALUE.value.format(type(column_values)), invalid_input_error_code) + + if request.return_tokens and redaction_type: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZATION_NOT_SUPPORTED_WITH_REDACTION.value.format("GET"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.REDACTION_WITH_TOKENS_NOT_SUPPORTED.value, invalid_input_error_code) + + if (column_name or column_values) and request.return_tokens: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZATION_SUPPORTED_ONLY_WITH_IDS.value.format("GET"), + logger=logger) + raise SkyflowError(SkyflowMessages.Error.TOKENS_GET_COLUMN_NOT_SUPPORTED.value, invalid_input_error_code) + + if column_values and not column_name: + log_error_log(SkyflowMessages.ErrorLogs.COLUMN_VALUES_IS_REQUIRED_GET.value.format("GET"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_VALUE.value.format(type(column_values)), invalid_input_error_code) + + if column_name and not column_values: + log_error_log(SkyflowMessages.ErrorLogs.COLUMN_NAME_IS_REQUIRED.value.format("GET"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code) + + if (column_name or column_values) and skyflow_ids: + log_error_log(SkyflowMessages.ErrorLogs.BOTH_IDS_AND_COLUMN_NAME_PASSED.value.format("GET"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED.value, invalid_input_error_code) + +def validate_update_request(logger, request): + skyflow_id = "" + field = {key: value for key, value in request.data.items() if key != "skyflow_id"} + + try: + skyflow_id = request.data.get("skyflow_id") + except Exception: + log_error_log(SkyflowMessages.ErrorLogs.SKYFLOW_ID_IS_REQUIRED.value.format("UPDATE"), logger=logger) + + if not skyflow_id or not skyflow_id.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_SKYFLOW_ID.value.format("UPDATE"), logger = logger) + + if not isinstance(request.table, str): + log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("UPDATE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code) + if not request.table.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("UPDATE"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code) + + if not isinstance(request.return_tokens, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code) + + if not isinstance(request.data, dict): +
raise SkyflowError(SkyflowMessages.Error.INVALID_FIELDS_TYPE.value.format(type(request.data)), invalid_input_error_code) + + if not len(request.data.items()): + raise SkyflowError(SkyflowMessages.Error.UPDATE_FIELD_KEY_ERROR.value, invalid_input_error_code) + + if request.token_mode is not None: + if not isinstance(request.token_mode, TokenMode): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_MODE_TYPE.value, invalid_input_error_code) + + if request.tokens: + if not isinstance(request.tokens, dict) or not request.tokens: + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("UPDATE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code) + + if request.token_mode == TokenMode.ENABLE and not request.tokens: + raise SkyflowError(SkyflowMessages.Error.NO_TOKENS_IN_INSERT.value.format(request.token_mode), + invalid_input_error_code) + + if request.token_mode == TokenMode.DISABLE and request.tokens: + raise SkyflowError(SkyflowMessages.Error.TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE.value, invalid_input_error_code) + + if request.token_mode == TokenMode.ENABLE_STRICT: + if len(field) != len(request.tokens): + log_error_log( + SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("UPDATE"), + logger=logger) + raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, + invalid_input_error_code) + + if set(field.keys()) != set(request.tokens.keys()): + log_error_log( + SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("UPDATE"), + logger=logger) + raise SkyflowError( + SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, + invalid_input_error_code) + +def validate_detokenize_request(logger, request): + if not isinstance(request.redaction_type, RedactionType): + raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(request.redaction_type)), invalid_input_error_code) + + if not isinstance(request.continue_on_error, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value, invalid_input_error_code) + + if not isinstance(request.tokens, list): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value.format(type(request.tokens)), invalid_input_error_code) + + if not len(request.tokens): + log_error_log(SkyflowMessages.ErrorLogs.TOKENS_REQUIRED.value.format("DETOKENIZE"), logger = logger) + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("DETOKENIZE"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value, invalid_input_error_code) + +def validate_tokenize_request(logger, request): + parameters = request.values + if not isinstance(parameters, list): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETERS.value.format(type(parameters)), invalid_input_error_code) + + if not len(parameters): + raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETERS.value, invalid_input_error_code) + + for i, param in enumerate(parameters): + if not isinstance(param, dict): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETER.value.format(i, type(param)), invalid_input_error_code) + + allowed_keys = {"value", "column_group"} + + if set(param.keys()) != allowed_keys: + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETER_KEY.value.format(i), invalid_input_error_code) + + if not param.get("value"): +
log_error_log(SkyflowMessages.ErrorLogs.COLUMN_VALUES_IS_REQUIRED_TOKENIZE.value.format("TOKENIZE"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETER_VALUE.value.format(i), invalid_input_error_code) + if not param.get("column_group"): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_COLUMN_GROUP_IN_COLUMN_VALUES.value.format("TOKENIZE"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETER_COLUMN_GROUP.value.format(i), invalid_input_error_code) + +def validate_invoke_connection_params(logger, query_params, path_params): + if not isinstance(path_params, dict): + raise SkyflowError(SkyflowMessages.Error.INVALID_PATH_PARAMS.value, invalid_input_error_code) + + if not isinstance(query_params, dict): + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code) + + for param, value in path_params.items(): + if not(isinstance(param, str) and isinstance(value, str)): + raise SkyflowError(SkyflowMessages.Error.INVALID_PATH_PARAMS.value, invalid_input_error_code) + + for param, value in query_params.items(): + if not isinstance(param, str): + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code) + + try: + json.dumps(query_params) + except TypeError: + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code) diff --git a/skyflow/vault/__init__.py b/skyflow/vault/__init__.py index 7b6868ef..e69de29b 100644 --- a/skyflow/vault/__init__.py +++ b/skyflow/vault/__init__.py @@ -1,5 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._client import Client -from ._config import * \ No newline at end of file diff --git a/skyflow/vault/_client.py b/skyflow/vault/_client.py deleted file mode 100644 index e426f59f..00000000 --- a/skyflow/vault/_client.py +++ /dev/null @@ -1,283 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
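As a quick illustration of what validate_invoke_connection_params above accepts: path parameters must be a string-to-string map because each value is substituted into the connection URL, and query parameters must survive json.dumps. The values below are placeholders.

```python
# Accepted: every path parameter key and value is a string.
path_params = {"card_id": "crd_123"}

# Accepted: string keys, JSON-serializable values.
query_params = {"expand": "billing", "limit": 5}

# Rejected by the checks above: the value is a set, which json.dumps
# cannot serialize, so INVALID_QUERY_PARAMS is raised.
bad_query_params = {"expand": {"billing"}}
```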
-''' -import json -import types -import requests -import asyncio -from skyflow.vault._insert import getInsertRequestBody, processResponse, convertResponse -from skyflow.vault._update import sendUpdateRequests, createUpdateResponseBody -from skyflow.vault._config import Configuration, ConnectionConfig, DeleteOptions, DetokenizeOptions, GetOptions, InsertOptions, UpdateOptions, QueryOptions -from skyflow.vault._connection import createRequest -from skyflow.vault._detokenize import sendDetokenizeRequests, createDetokenizeResponseBody -from skyflow.vault._get_by_id import sendGetByIdRequests, createGetResponseBody -from skyflow.vault._get import sendGetRequests -from skyflow.vault._delete import deleteProcessResponse -from skyflow.vault._query import getQueryRequestBody, getQueryResponse -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import log_info, log_error, InfoMessages, InterfaceName, getMetrics -from skyflow.vault._token import tokenProviderWrapper - -class Client: - def __init__(self, config: Configuration): - - interface = InterfaceName.CLIENT.value - - log_info(InfoMessages.INITIALIZE_CLIENT.value, interface=interface) - - if not isinstance(config.vaultID, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.VAULT_ID_INVALID_TYPE.value % ( - str(type(config.vaultID))), interface=interface) - if not isinstance(config.vaultURL, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.VAULT_URL_INVALID_TYPE.value % ( - str(type(config.vaultURL))), interface=interface) - - if not isinstance(config.tokenProvider, types.FunctionType): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.TOKEN_PROVIDER_ERROR.value % ( - str(type(config.tokenProvider))), interface=interface) - - self.vaultID = config.vaultID - self.vaultURL = config.vaultURL.rstrip('/') - self.tokenProvider = config.tokenProvider - self.storedToken = '' - log_info(InfoMessages.CLIENT_INITIALIZED.value, interface=interface) - - def insert(self, records: dict, options: InsertOptions = InsertOptions()): - interface = InterfaceName.INSERT.value - log_info(InfoMessages.INSERT_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - jsonBody = getInsertRequestBody(records, options) - requestURL = self._get_complete_vault_url() - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - - response = requests.post(requestURL, data=jsonBody, headers=headers) - processedResponse = processResponse(response) - result, partial = convertResponse(records, processedResponse, options) - if partial: - log_error(SkyflowErrorMessages.BATCH_INSERT_PARTIAL_SUCCESS.value, interface) - elif 'records' not in result: - log_error(SkyflowErrorMessages.BATCH_INSERT_FAILURE.value, interface) - else: - log_info(InfoMessages.INSERT_DATA_SUCCESS.value, interface) - return result - - def detokenize(self, records: dict, options: DetokenizeOptions = DetokenizeOptions()): - interface = InterfaceName.DETOKENIZE.value - log_info(InfoMessages.DETOKENIZE_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() + '/detokenize' - responses = asyncio.run(sendDetokenizeRequests( - records, url, self.storedToken, options)) - result, 
partial = createDetokenizeResponseBody(records, responses, options) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - elif 'records' not in result: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, SkyflowErrorMessages.SERVER_ERROR, result, interface=interface) - else: - log_info(InfoMessages.DETOKENIZE_SUCCESS.value, interface) - return result - - def get(self, records, options: GetOptions = GetOptions()): - interface = InterfaceName.GET.value - log_info(InfoMessages.GET_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendGetRequests( - records, options, url, self.storedToken)) - result, partial = createGetResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.GET_SUCCESS.value, interface) - - return result - - def get_by_id(self, records): - interface = InterfaceName.GET_BY_ID.value - log_info(InfoMessages.GET_BY_ID_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendGetByIdRequests( - records, url, self.storedToken)) - result, partial = createGetResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.GET_BY_ID_SUCCESS.value, interface) - - return result - - def invoke_connection(self, config: ConnectionConfig): - - interface = InterfaceName.INVOKE_CONNECTION.value - log_info(InfoMessages.INVOKE_CONNECTION_TRIGGERED.value, interface) - - session = requests.Session() - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - request = createRequest(config) - - if not 'X-Skyflow-Authorization'.lower() in request.headers: - request.headers['x-skyflow-authorization'] = self.storedToken - - request.headers['sky-metadata'] = json.dumps(getMetrics()) - - response = session.send(request) - session.close() - return processResponse(response, interface=interface) - - def query(self, queryInput, options: QueryOptions = QueryOptions()): - interface = InterfaceName.QUERY.value - log_info(InfoMessages.QUERY_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - jsonBody = getQueryRequestBody(queryInput, options) - requestURL = self._get_complete_vault_url() + "/query" - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Content-Type": "application/json", - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - - response = requests.post(requestURL, data=jsonBody, headers=headers) - result = getQueryResponse(response) - - log_info(InfoMessages.QUERY_SUCCESS.value, interface) - return result - - def _checkConfig(self, interface): - ''' - Performs basic check on the given client config - ''' - if not len(self.vaultID) > 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_VAULT_ID, interface=interface) - if not len(self.vaultURL) > 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - 
SkyflowErrorMessages.EMPTY_VAULT_URL, interface=interface) - - def _get_complete_vault_url(self): - ''' - Get the complete vault url from given vault url and vault id - ''' - return self.vaultURL + "/v1/vaults/" + self.vaultID - - def update(self, updateInput, options: UpdateOptions = UpdateOptions()): - interface = InterfaceName.UPDATE.value - log_info(InfoMessages.UPDATE_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendUpdateRequests( - updateInput, options, url, self.storedToken)) - result, partial = createUpdateResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.UPDATE_DATA_SUCCESS.value, interface) - return result - - def delete(self, records: dict, options: DeleteOptions = DeleteOptions()): - interface = InterfaceName.DELETE.value - log_info(InfoMessages.DELETE_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - error_list = [] - result_list = [] - errors = {} - result = {} - try: - record = records["records"] - if not isinstance(record, list): - recordsType = str(type(record)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - if len(record) == 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_RECORDS_IN_DELETE, interface=interface) - - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - try: - for record in records["records"]: - id = record["id"] - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_ID_TYPE.value % (idType), interface=interface) - if id == "": - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_ID_IN_DELETE, interface=interface) - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - try: - for record in records["records"]: - table = record["table"] - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - if table == "": - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_TABLE_IN_DELETE, interface=interface) - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - - partial=None - - for record in records["records"]: - request_url = self._get_complete_vault_url() + "/" + record["table"] + "/" + record["id"] - response = requests.delete(request_url, headers=headers) - partial,processed_response = deleteProcessResponse(response, records) - if processed_response is not None and processed_response.get('code') == 404: - errors.update({'id': record["id"], 'error': processed_response}) - error_list.append(errors) - else: - result_list.append(processed_response) - if 
result_list: - result.update({'records': result_list}) - if errors: - result.update({'errors': error_list}) - - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - - else: - log_info(InfoMessages.DELETE_DATA_SUCCESS.value, interface) - return result diff --git a/skyflow/vault/_config.py b/skyflow/vault/_config.py deleted file mode 100644 index 796c1824..00000000 --- a/skyflow/vault/_config.py +++ /dev/null @@ -1,89 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from enum import Enum -from types import FunctionType -from typing import List - - -class Configuration: - - def __init__(self, vaultID: str = None, vaultURL: str = None, tokenProvider: FunctionType = None): - - self.vaultID = '' - self.vaultURL = '' - - if tokenProvider == None and vaultURL == None and isinstance(vaultID, FunctionType): - self.tokenProvider = vaultID - elif tokenProvider == None and vaultID == None and isinstance(vaultURL, FunctionType): - self.tokenProvider = vaultURL - else: - if tokenProvider is None: - raise TypeError('tokenProvider must be given') - self.vaultID = vaultID or "" - self.vaultURL = vaultURL or "" - self.tokenProvider = tokenProvider - -class BYOT(Enum): - DISABLE = "DISABLE" - ENABLE = "ENABLE" - ENABLE_STRICT = "ENABLE_STRICT" - -class UpsertOption: - def __init__(self, table: str, column: str): - self.table = table - self.column = column - - -class InsertOptions: - def __init__(self, tokens: bool=True, upsert :List[UpsertOption]=None, continueOnError:bool=None, byot:BYOT=BYOT.DISABLE): - self.tokens = tokens - self.upsert = upsert - self.continueOnError = continueOnError - self.byot = byot - - -class UpdateOptions: - def __init__(self, tokens: bool = True): - self.tokens = tokens - -class GetOptions: - def __init__(self, tokens: bool = False): - self.tokens = tokens - -class DeleteOptions: - def __init__(self, tokens: bool=False): - self.tokens = tokens - -class QueryOptions: - def __init__(self): - pass - -class DetokenizeOptions: - def __init__(self, continueOnError: bool=True): - self.continueOnError = continueOnError - -class RequestMethod(Enum): - GET = 'GET' - POST = 'POST' - PUT = 'PUT' - PATCH = 'PATCH' - DELETE = 'DELETE' - - -class ConnectionConfig: - def __init__(self, connectionURL: str, methodName: RequestMethod, - pathParams: dict = {}, queryParams: dict = {}, requestHeader: dict = {}, requestBody: dict = {}): - self.connectionURL = connectionURL.rstrip("/") - self.methodName = methodName - self.pathParams = pathParams - self.queryParams = queryParams - self.requestHeader = requestHeader - self.requestBody = requestBody - - -class RedactionType(Enum): - PLAIN_TEXT = "PLAIN_TEXT" - MASKED = "MASKED" - REDACTED = "REDACTED" - DEFAULT = "DEFAULT" diff --git a/skyflow/vault/_connection.py b/skyflow/vault/_connection.py deleted file mode 100644 index 86f75c08..00000000 --- a/skyflow/vault/_connection.py +++ /dev/null @@ -1,116 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
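For readers tracking what is being removed, the option classes deleted above were combined roughly as follows at V1 call sites. This targets the V1 surface that this patch deletes; the vault details and token provider are placeholders.

```python
# V1-style client and insert options, built from the classes removed above.
from skyflow.vault import Client, Configuration, InsertOptions, UpsertOption, BYOT

def token_provider():
    # Placeholder: V1 expected a plain function that returns a bearer token.
    return "<bearer_token>"

config = Configuration("<vault_id>", "https://<vault_url>", token_provider)
client = Client(config)

options = InsertOptions(
    tokens=True,
    upsert=[UpsertOption(table="persons", column="email")],  # upsert keyed on a unique column
    continueOnError=True,
    byot=BYOT.DISABLE,
)
```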
-''' -from requests.sessions import PreparedRequest -from ._config import ConnectionConfig -from skyflow.errors._skyflow_errors import * -import requests -import json - -from skyflow._utils import InterfaceName, http_build_query, supported_content_types, r_urlencode - -interface = InterfaceName.INVOKE_CONNECTION.value - - -def createRequest(config: ConnectionConfig) -> PreparedRequest: - url = parsePathParams(config.connectionURL.rstrip('/'), config.pathParams) - - try: - if isinstance(config.requestHeader, dict): - header = to_lowercase_keys(json.loads( - json.dumps(config.requestHeader))) - else: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_REQUEST_BODY, interface=interface) - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_HEADERS, interface=interface) - if not 'Content-Type'.lower() in header: - header['content-type'] = supported_content_types["JSON"] - - try: - if isinstance(config.requestBody, dict): - json_data, files = get_data_from_content_type( - config.requestBody, header["content-type"]) - else: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_RESPONSE_BODY, interface=interface) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_REQUEST_BODY, interface=interface) - - verifyParams(config.queryParams, config.pathParams) - - try: - return requests.Request( - method=config.methodName.value, - url=url, - data=json_data, - headers=header, - params=config.queryParams, - files=files - ).prepare() - except requests.exceptions.InvalidURL: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_URL.value % ( - config.connectionURL), interface=interface) - - -def parsePathParams(url, pathParams): - result = url - for param, value in pathParams.items(): - result = result.replace('{' + param + '}', value) - - return result - - -def verifyParams(queryParams, pathParams): - if not isinstance(pathParams, dict): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_PATH_PARAMS, interface=interface) - if not isinstance(queryParams, dict): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_QUERY_PARAMS, interface=interface) - - for param, value in pathParams.items(): - if not(isinstance(param, str) and isinstance(value, str)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_PATH_PARAM_TYPE.value % ( - str(type(param)), str(type(value))), interface=interface) - - for param, value in queryParams.items(): - if not isinstance(param, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_QUERY_PARAM_TYPE.value % ( - str(type(param)), str(type(value))), interface=interface) - - try: - json.dumps(queryParams) - except TypeError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_QUERY_PARAMS, interface=interface) - - -def to_lowercase_keys(dict): - ''' - convert keys of dictionary to lowercase - ''' - result = {} - for key, value in dict.items(): - result[key.lower()] = value - - return result - - -def get_data_from_content_type(data, content_type): - ''' - Get request data according to content type - ''' - converted_data = data - files = {} - if content_type == supported_content_types["URLENCODED"]: - converted_data = http_build_query(data) - elif content_type == supported_content_types["FORMDATA"]: - converted_data = 
r_urlencode(list(), dict(), data) - files = {(None, None)} - elif content_type == supported_content_types["JSON"]: - converted_data = json.dumps(data) - - return converted_data, files diff --git a/skyflow/vault/_delete.py b/skyflow/vault/_delete.py deleted file mode 100644 index 9faa8820..00000000 --- a/skyflow/vault/_delete.py +++ /dev/null @@ -1,43 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json - -import requests -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName - -interface = InterfaceName.DELETE.value - - -def deleteProcessResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content - partial = False - try: - response.raise_for_status() - if statusCode == 204: - return None - try: - return partial,json.loads(content) - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response is not None and response.content is not None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == dict and 'message' in errorResponse[ - 'error']: - message = errorResponse['error']['message'] - partial=True - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - error = {} - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - error.update({"code": statusCode, "description": message}) - return partial,error - diff --git a/skyflow/vault/_detokenize.py b/skyflow/vault/_detokenize.py deleted file mode 100644 index 9e19c3f2..00000000 --- a/skyflow/vault/_detokenize.py +++ /dev/null @@ -1,134 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
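The removed connection helpers above substitute {name} placeholders in the connection URL with pathParams values before the request is prepared, and the V2 flow is expected to keep the same substitution inside construct_invoke_connection_request. A standalone sketch of that behavior, with a made-up URL:

```python
def parse_path_params(url: str, path_params: dict) -> str:
    # Mirrors the removed parsePathParams helper: each {name} placeholder in
    # the connection URL is replaced with the matching string value.
    result = url
    for param, value in path_params.items():
        result = result.replace("{" + param + "}", value)
    return result

url = "https://connection.example.com/v1/cards/{card_id}/charges"  # placeholder URL
print(parse_path_params(url, {"card_id": "crd_123"}))
# -> https://connection.example.com/v1/cards/crd_123/charges
```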
-''' -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession, request -import json -from ._config import RedactionType -from skyflow._utils import InterfaceName, getMetrics -from skyflow.vault._config import DetokenizeOptions - -interface = InterfaceName.DETOKENIZE.value - - -def getDetokenizeRequestBody(data): - try: - token = data["token"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TOKEN_KEY_ERROR, interface=interface) - if not isinstance(token, str): - tokenType = str(type(token)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TOKEN_TYPE.value % ( - tokenType), interface=interface) - - if "redaction" in data: - if not isinstance(data["redaction"], RedactionType): - redactionType = str(type(data["redaction"])) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - else: - redactionType = data["redaction"] - else: - redactionType = RedactionType.PLAIN_TEXT - - requestBody = {"detokenizationParameters": []} - requestBody["detokenizationParameters"].append({ - "token": token, - "redaction": redactionType.value - }) - return requestBody - -def getBulkDetokenizeRequestBody(records): - bulkRequestBody = {"detokenizationParameters": []} - for record in records: - requestBody = getDetokenizeRequestBody(record) - bulkRequestBody["detokenizationParameters"].append(requestBody["detokenizationParameters"][0]) - return bulkRequestBody - -async def sendDetokenizeRequests(data, url, token, options: DetokenizeOptions): - - tasks = [] - - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - if not options.continueOnError: - requestBody = getBulkDetokenizeRequestBody(records) - jsonBody = json.dumps(requestBody) - validatedRecords.append(jsonBody) - else: - for record in records: - requestBody = getDetokenizeRequestBody(record) - jsonBody = json.dumps(requestBody) - validatedRecords.append(jsonBody) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - - } - task = asyncio.ensure_future(post(url, record, headers, session)) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - - -async def post(url, data, headers, session): - async with session.post(url, data=data, headers=headers, ssl=False) as response: - try: - return (await response.read(), response.status, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status) - - -def createDetokenizeResponseBody(records, responses, options: DetokenizeOptions): - result = { - "records": [], - "errors": [] - } - partial = False - for index, response in enumerate(responses): - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - for record in 
jsonRes["records"]: - temp = {} - temp["token"] = record["token"] - temp["value"] = record["value"] - result["records"].append(temp) - else: - temp = {"error": {}} - - if options.continueOnError: - temp["token"] = records["records"][index]["token"] - - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = jsonRes["error"]["message"] - if len(r) > 2 and r[2] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[2]) - result["errors"].append(temp) - partial = True - if len(result["records"]) == 0: - partial = False - result.pop("records") - elif len(result["errors"]) == 0: - result.pop("errors") - return result, partial diff --git a/skyflow/vault/_get.py b/skyflow/vault/_get.py deleted file mode 100644 index f00ed2e4..00000000 --- a/skyflow/vault/_get.py +++ /dev/null @@ -1,127 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession -from skyflow.vault._config import RedactionType, GetOptions -from skyflow._utils import InterfaceName, getMetrics -from skyflow.vault._get_by_id import get - -interface = InterfaceName.GET.value - -def getGetRequestBody(data, options: GetOptions): - requestBody = {} - ids = None - if "ids" in data: - ids = data["ids"] - if not isinstance(ids, list): - idsType = str(type(ids)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_IDS_TYPE.value % (idsType), interface=interface) - for id in ids: - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_ID_TYPE.value % ( - idType), interface=interface) - requestBody["skyflow_ids"] = ids - try: - table = data["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - else: - requestBody["tableName"] = table - - if options.tokens: - if data.get("redaction"): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.REDACTION_WITH_TOKENS_NOT_SUPPORTED, interface=interface) - if (data.get('columnName') or data.get('columnValues')): - raise SkyflowError(SkyflowErrorCodes.TOKENS_GET_COLUMN_NOT_SUPPORTED, - SkyflowErrorMessages.TOKENS_GET_COLUMN_NOT_SUPPORTED, interface=interface) - requestBody["tokenization"] = options.tokens - else: - try: - redaction = data["redaction"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.REDACTION_KEY_ERROR, interface=interface) - if not isinstance(redaction, RedactionType): - redactionType = str(type(redaction)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - else: - requestBody["redaction"] = redaction.value - - columnName = None - if "columnName" in data: - columnName = data["columnName"] - if not isinstance(columnName, str): - columnNameType = str(type(columnName)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_COLUMN_NAME.value % ( - columnNameType), interface=interface) - - columnValues = None - if columnName is not None and "columnValues" in data: - columnValues = data["columnValues"] - 
if not isinstance(columnValues, list): - columnValuesType = str(type(columnValues)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_COLUMN_VALUE.value % ( - columnValuesType), interface=interface) - else: - requestBody["column_name"] = columnName - requestBody["column_values"] = columnValues - - if (ids is None and (columnName is None or columnValues is None)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR, interface=interface) - elif (ids != None and (columnName != None or columnValues != None)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED, interface=interface) - return requestBody - -async def sendGetRequests(data, options: GetOptions, url, token): - tasks = [] - try: - records = data["records"] - except KeyError: - raise SkyflowError( - SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, - interface=interface - ) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError( - SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % recordsType, - interface=interface - ) - - validatedRecords = [] - for record in records: - requestBody = getGetRequestBody(record, options) - validatedRecords.append(requestBody) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - table = record.pop("tableName") - params = record - if options.tokens: - params["tokenization"] = json.dumps(record["tokenization"]) - task = asyncio.ensure_future( - get(url, headers, params, session, table) - ) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks \ No newline at end of file diff --git a/skyflow/vault/_get_by_id.py b/skyflow/vault/_get_by_id.py deleted file mode 100644 index d4ad6a04..00000000 --- a/skyflow/vault/_get_by_id.py +++ /dev/null @@ -1,116 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
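On the V2 side, the equivalent call goes through a DetokenizeRequest checked by validate_detokenize_request earlier in this patch. A minimal sketch, assuming the constructor keywords mirror the attribute names the validator reads (tokens, redaction_type, continue_on_error); the token values are placeholders.

```python
from skyflow.utils.enums import RedactionType        # assumed export of the new enum
from skyflow.vault.tokens import DetokenizeRequest   # import path used by the V2 controller

request = DetokenizeRequest(
    tokens=["5530-4316-0674-5748", "9262-4650-8383-2059"],  # placeholder tokens
    redaction_type=RedactionType.PLAIN_TEXT,
    continue_on_error=True,   # mirrors the flag validated above
)
```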
-''' -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession -import json -from ._config import RedactionType -from skyflow._utils import InterfaceName, getMetrics - -interface = InterfaceName.GET_BY_ID.value - -def getGetByIdRequestBody(data): - try: - ids = data["ids"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - if not isinstance(ids, list): - idsType = str(type(ids)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_IDS_TYPE.value % (idsType), interface=interface) - for id in ids: - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_ID_TYPE.value % ( - idType), interface=interface) - try: - table = data["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - try: - redaction = data["redaction"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.REDACTION_KEY_ERROR, interface=interface) - if not isinstance(redaction, RedactionType): - redactionType = str(type(redaction)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - return ids, table, redaction.value - - -async def sendGetByIdRequests(data, url, token): - tasks = [] - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - for record in records: - ids, table, redaction = getGetByIdRequestBody(record) - validatedRecords.append((ids, table, redaction)) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - params = {"skyflow_ids": record[0], "redaction": record[2]} - task = asyncio.ensure_future( - get(url, headers, params, session, record[1])) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - -async def get(url, headers, params, session, table): - async with session.get(url + "/" + table, headers=headers, params=params, ssl=False) as response: - try: - return (await response.read(), response.status, table, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status, table) - -def createGetResponseBody(responses): - result = { - "records": [], - "errors": [] - } - partial = False - for response in responses: - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - changedRecords = [] - for record in jsonRes["records"]: - temp = record - temp["table"] = r[2] - 
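For reference, the removed get-by-id path issued one GET per table against the vault URL, passing skyflow_ids and redaction as query parameters. A sketch of that request shape with placeholder values:

```python
vault_url = "https://example.vault.skyflowapis.com"   # placeholder vault URL
vault_id = "<vault_id>"
table = "persons"                                      # placeholder table

# Mirrors the removed _get_complete_vault_url plus sendGetByIdRequests logic.
request_url = f"{vault_url}/v1/vaults/{vault_id}/{table}"
params = {"skyflow_ids": ["id-1", "id-2"], "redaction": "PLAIN_TEXT"}
```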
changedRecords.append(temp) - result["records"] += changedRecords - else: - temp = {"error": {}} - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = jsonRes["error"]["message"] - if len(r) > 3 and r[3] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[3]) - result["errors"].append(temp) - partial = True - return result, partial diff --git a/skyflow/vault/_insert.py b/skyflow/vault/_insert.py deleted file mode 100644 index 8de342ae..00000000 --- a/skyflow/vault/_insert.py +++ /dev/null @@ -1,238 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json - -import requests -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName -from skyflow.vault._config import BYOT, InsertOptions - -interface = InterfaceName.INSERT.value - - -def getInsertRequestBody(data, options: InsertOptions): - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - upsertOptions = options.upsert - - if upsertOptions: - validateUpsertOptions(upsertOptions=upsertOptions) - - requestPayload = [] - for index, record in enumerate(records): - tableName, fields = getTableAndFields(record) - postPayload = { - "tableName": tableName, - "fields": fields, - "method": "POST", - "quorum": True, - } - validateTokensAndByotMode(record, options.byot) - if "tokens" in record: - tokens = getTokens(record) - postPayload["tokens"] = tokens - - if upsertOptions: - postPayload["upsert"] = getUpsertColumn(tableName,upsertOptions) - - if options.tokens: - postPayload['tokenization'] = True - - requestPayload.append(postPayload) - requestBody = { - "records": requestPayload, - "continueOnError": options.continueOnError, - "byot": options.byot.value - } - if options.continueOnError == None: - requestBody.pop('continueOnError') - try: - jsonBody = json.dumps(requestBody) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_JSON.value % ( - 'insert payload'), interface=interface) - - return jsonBody - - -def getTableAndFields(record): - try: - table = record["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - - try: - fields = record["fields"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FIELDS_KEY_ERROR, interface=interface) - - if not isinstance(fields, dict): - fieldsType = str(type(fields)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % ( - fieldsType), interface=interface) - - return (table, fields) - -def validateTokensAndByotMode(record, byot:BYOT): - - if not isinstance(byot, BYOT): - byotType = str(type(byot)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_BYOT_TYPE.value % (byotType), interface=interface) - - if byot == BYOT.DISABLE: - if 
"tokens" in record: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.TOKENS_PASSED_FOR_BYOT_DISABLE, interface=interface) - elif "tokens" not in record: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % byot.value, interface=interface) - elif byot == BYOT.ENABLE_STRICT: - tokens = record["tokens"] - fields = record["fields"] - if len(tokens) != len(fields): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT, interface=interface) - -def getTokens(record): - tokens = record["tokens"] - if not isinstance(tokens, dict): - tokensType = str(type(tokens)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TOKENS_TYPE.value % ( - tokensType), interface=interface) - - if len(tokens) == 0 : - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_TOKENS_IN_INSERT, interface= interface) - - fields = record["fields"] - for tokenKey in tokens: - if tokenKey not in fields: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS, interface= interface) - return tokens - -def processResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content.decode('utf-8') - try: - response.raise_for_status() - try: - jsonContent = json.loads(content) - if 'x-request-id' in response.headers: - requestId = response.headers['x-request-id'] - jsonContent['requestId'] = requestId - return jsonContent - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response != None and response.content != None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - raise SkyflowError(statusCode, message, interface=interface) - - -def convertResponse(request: dict, response: dict, options: InsertOptions): - responseArray = response['responses'] - requestId = response['requestId'] - records = request['records'] - - if options.continueOnError: - return buildResponseWithContinueOnError(responseArray, records, options.tokens, requestId) - - else: - return buildResponseWithoutContinueOnError(responseArray, records, options.tokens) - -def buildResponseWithContinueOnError(responseArray, records, tokens: bool, requestId): - partial = False - errors = [] - result = [] - for idx, response in enumerate(responseArray): - table = records[idx]['table'] - body = response['Body'] - status = response['Status'] - - if 'records' in body: - skyflow_id = body['records'][0]['skyflow_id'] - if tokens: - fieldsDict = body['records'][0]['tokens'] - fieldsDict['skyflow_id'] = skyflow_id - result.append({'table': table, 'fields': fieldsDict, 'request_index': idx}) - else: - result.append({'table': table, 'skyflow_id': skyflow_id, 'request_index': idx}) - elif 'error' in body: - partial = True - message = body['error'] - message += ' - request id: ' + requestId - error = {"code": status, "description": message, "request_index": idx} - errors.append({"error": 
error}) - finalResponse = {"records": result, "errors": errors} - if len(result) == 0: - partial = False - finalResponse.pop('records') - elif len(errors) == 0: - finalResponse.pop('errors') - return finalResponse, partial - -def buildResponseWithoutContinueOnError(responseArray, records, tokens: bool): - # recordsSize = len(records) - result = [] - for idx, _ in enumerate(responseArray): - table = records[idx]['table'] - skyflow_id = responseArray[idx]['records'][0]['skyflow_id'] - if tokens: - fieldsDict = responseArray[idx]['records'][0]['tokens'] - fieldsDict['skyflow_id'] = skyflow_id - result.append({'table': table, 'fields': fieldsDict, 'request_index': idx}) - else: - result.append({'table': table, 'skyflow_id': skyflow_id, 'request_index': idx}) - return {'records': result}, False - -def getUpsertColumn(tableName, upsertOptions): - uniqueColumn:str = '' - for upsertOption in upsertOptions: - if tableName == upsertOption.table: - uniqueColumn = upsertOption.column - return uniqueColumn - -def validateUpsertOptions(upsertOptions): - if not isinstance(upsertOptions,list): - upsertOptionsType = str(type(upsertOptions)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_OPTIONS_TYPE.value %( - upsertOptionsType),interface=interface) - if len(upsertOptions) == 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTIONS_LIST.value, interface=interface) - - for index, upsertOption in enumerate(upsertOptions): - if upsertOption.table == None or not isinstance(upsertOption.table,str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_TABLE_TYPE.value %( - index),interface=interface) - if upsertOption.table == '': - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_TABLE.value %( - index),interface=interface) - if upsertOption.column == None or not isinstance(upsertOption.column,str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_COLUMN_TYPE.value %( - index),interface=interface) - if upsertOption.column == '': - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_COLUMN.value %( - index),interface=interface) \ No newline at end of file diff --git a/skyflow/vault/_query.py b/skyflow/vault/_query.py deleted file mode 100644 index 373264fa..00000000 --- a/skyflow/vault/_query.py +++ /dev/null @@ -1,62 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
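The V2 replacement for the insert payload built above is an InsertRequest handed to the Vault controller. Below is a minimal sketch of a BYOT (ENABLE_STRICT) insert that satisfies the values/tokens pairing rule enforced by validate_insert_request earlier in this patch; the constructor keywords are assumed from the attribute names the validator reads, and all field and token values are placeholders.

```python
from skyflow.utils.enums import TokenMode        # assumed export of the new enum
from skyflow.vault.data import InsertRequest     # import path used by the V2 controller

# ENABLE_STRICT requires one tokens entry per values entry with identical key sets.
request = InsertRequest(
    table="persons",                                               # placeholder table
    values=[{"name": "Jane Doe", "card_number": "4111111111111111"}],
    tokens=[{"name": "tok_name_1", "card_number": "tok_card_1"}],  # placeholder tokens
    token_mode=TokenMode.ENABLE_STRICT,
    return_tokens=True,
)
```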
-''' -import json - -import requests -from ._config import QueryOptions -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName - -interface = InterfaceName.QUERY.value - - -def getQueryRequestBody(data, options): - try: - query = data["query"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.QUERY_KEY_ERROR, interface=interface) - - if not isinstance(query, str): - queryType = str(type(query)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_QUERY_TYPE.value % queryType, interface=interface) - - if not query.strip(): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT,SkyflowErrorMessages.EMPTY_QUERY.value, interface=interface) - - requestBody = {"query": query} - try: - jsonBody = json.dumps(requestBody) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_JSON.value % ( - 'query payload'), interface=interface) - - return jsonBody - -def getQueryResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content.decode('utf-8') - try: - response.raise_for_status() - try: - return json.loads(content) - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response != None and response.content != None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - raise SkyflowError(SkyflowErrorCodes.INVALID_INDEX, message, interface=interface) - error = {"error": {}} - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - error['error'].update({"code": statusCode, "description": message}) - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, SkyflowErrorMessages.SERVER_ERROR.value, error, interface=interface) diff --git a/skyflow/vault/_token.py b/skyflow/vault/_token.py deleted file mode 100644 index d80f1751..00000000 --- a/skyflow/vault/_token.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
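On the V2 side, validate_query_request earlier in this patch only admits statements that begin with SELECT, mirroring the intent of the removed helper above. A minimal sketch, assuming QueryRequest (the import path used by the V2 controller) takes the query as a keyword argument; table and column names are placeholders.

```python
from skyflow.vault.data import QueryRequest   # import path used by the V2 controller

# Only SELECT statements pass validation; anything else (e.g. "UPDATE persons
# SET ...") is rejected with INVALID_QUERY_COMMAND before a request is sent.
request = QueryRequest(query="SELECT name, card_number FROM persons LIMIT 10")
```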
-''' -import jwt -import time -from skyflow.errors._skyflow_errors import * - - -def tokenProviderWrapper(storedToken: str, newTokenProvider, interface: str): - ''' - Check if stored token is not expired, if not return a new token - ''' - - if len(storedToken) == 0: - newToken = newTokenProvider() - verify_token_from_provider(newToken, interface) - return newToken - - try: - decoded = jwt.decode(storedToken, options={ - "verify_signature": False, "verify_aud": False}) - if time.time() < decoded['exp']: - return storedToken - else: - newToken = newTokenProvider() - verify_token_from_provider(newToken, interface) - return newToken - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.JWT_DECODE_ERROR, interface=interface) - - -def verify_token_from_provider(token, interface): - ''' - Verify the jwt from token provider - ''' - try: - jwt.decode(token, options={ - "verify_signature": False, - "verify_aud": False - }, algorithms=['RS256']) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN, interface=interface) diff --git a/skyflow/vault/_update.py b/skyflow/vault/_update.py deleted file mode 100644 index c27a0919..00000000 --- a/skyflow/vault/_update.py +++ /dev/null @@ -1,106 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json - -import asyncio -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from ._insert import getTableAndFields -from skyflow._utils import InterfaceName, getMetrics -from aiohttp import ClientSession -from ._config import UpdateOptions - -interface = InterfaceName.UPDATE.value - -async def sendUpdateRequests(data,options: UpdateOptions,url,token): - tasks = [] - - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - for record in records: - tableName = validateUpdateRecord(record) - validatedRecords.append(record) - async with ClientSession() as session: - for record in validatedRecords: - recordUrl = url +'/'+ tableName +'/'+ record["id"] - reqBody = { - "record": { - "fields": record["fields"] - }, - "tokenization": options.tokens - } - reqBody = json.dumps(reqBody) - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - task = asyncio.ensure_future(put(recordUrl, reqBody, headers, session)) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - -def validateUpdateRecord(record): - try: - id = record["id"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_ID_TYPE.value % (idType), interface=interface) - table, fields = getTableAndFields(record) - keysLength = len(fields.keys()) - if(keysLength < 1): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.UPDATE_FIELD_KEY_ERROR, interface= interface) - return table - -async def put(url, data, headers, session): - async with session.put(url, data=data, headers=headers, 
ssl=False) as response: - try: - return (await response.read(), response.status, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status) - - -def createUpdateResponseBody(responses): - result = { - "records": [], - "errors": [] - } - partial = False - for response in responses: - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - temp = {} - temp["id"] = jsonRes["skyflow_id"] - if "tokens" in jsonRes: - temp["fields"] = jsonRes["tokens"] - result["records"].append(temp) - else: - temp = {"error": {}} - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = jsonRes["error"]["message"] - if len(r) > 2 and r[2] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[2]) - result["errors"].append(temp) - partial = True - return result, partial diff --git a/skyflow/vault/client/__init__.py b/skyflow/vault/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/vault/client/client.py b/skyflow/vault/client/client.py new file mode 100644 index 00000000..34a9374a --- /dev/null +++ b/skyflow/vault/client/client.py @@ -0,0 +1,102 @@ +import json +from skyflow.generated.rest import Configuration, RecordsApi, ApiClient, TokensApi, QueryApi +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired +from skyflow.utils import get_vault_url, get_credentials, SkyflowMessages +from skyflow.utils.logger import log_info + + +class VaultClient: + def __init__(self, config): + self.__config = config + self.__common_skyflow_credentials = None + self.__log_level = None + self.__client_configuration = None + self.__api_client = None + self.__logger = None + self.__is_config_updated = False + self.__bearer_token = None + + def set_common_skyflow_credentials(self, credentials): + self.__common_skyflow_credentials = credentials + + def set_logger(self, log_level, logger): + self.__log_level = log_level + self.__logger = logger + + def initialize_client_configuration(self): + credentials = get_credentials(self.__config.get("credentials"), self.__common_skyflow_credentials, logger = self.__logger) + token = self.get_bearer_token(credentials) + vault_url = get_vault_url(self.__config.get("cluster_id"), + self.__config.get("env"), + self.__config.get("vault_id"), + logger = self.__logger) + self.__client_configuration = Configuration(host=vault_url, access_token=token) + self.initialize_api_client(self.__client_configuration) + + def initialize_api_client(self, config): + self.__api_client = ApiClient(config) + + def get_records_api(self): + return RecordsApi(self.__api_client) + + def get_tokens_api(self): + return TokensApi(self.__api_client) + + def get_query_api(self): + return QueryApi(self.__api_client) + + def get_vault_id(self): + return self.__config.get("vault_id") + + def get_bearer_token(self, credentials): + if 'api_key' in credentials: + return credentials.get('api_key') + elif 'token' in credentials: + return credentials.get("token") + + options = { + "role_ids": self.__config.get("roles"), + "ctx": self.__config.get("ctx") + } + + if self.__bearer_token is None or self.__is_config_updated: + if 'path' in credentials: + path = credentials.get("path") + self.__bearer_token, _ = generate_bearer_token( + path, + options, + self.__logger + ) + else: + 
credentials_string = credentials.get('credentials_string') + log_info(SkyflowMessages.Info.GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED.value, self.__logger) + self.__bearer_token, _ = generate_bearer_token_from_creds( + credentials_string, + options, + self.__logger + ) + self.__is_config_updated = False + else: + log_info(SkyflowMessages.Info.REUSE_BEARER_TOKEN.value, self.__logger) + + if is_expired(self.__bearer_token): + self.__is_config_updated = True + raise SyntaxError(SkyflowMessages.Error.EXPIRED_TOKEN.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + return self.__bearer_token + + def update_config(self, config): + self.__config.update(config) + self.__is_config_updated = True + + def get_config(self): + return self.__config + + def get_common_skyflow_credentials(self): + return self.__common_skyflow_credentials + + def get_log_level(self): + return self.__log_level + + def get_logger(self): + return self.__logger \ No newline at end of file diff --git a/skyflow/vault/connection/__init__.py b/skyflow/vault/connection/__init__.py new file mode 100644 index 00000000..d5a3f574 --- /dev/null +++ b/skyflow/vault/connection/__init__.py @@ -0,0 +1,2 @@ +from ._invoke_connection_request import InvokeConnectionRequest +from ._invoke_connection_response import InvokeConnectionResponse \ No newline at end of file diff --git a/skyflow/vault/connection/_invoke_connection_request.py b/skyflow/vault/connection/_invoke_connection_request.py new file mode 100644 index 00000000..9634dfb3 --- /dev/null +++ b/skyflow/vault/connection/_invoke_connection_request.py @@ -0,0 +1,12 @@ +class InvokeConnectionRequest: + def __init__(self, + method, + body = None, + path_params = None, + query_params = None, + headers = None): + self.body = body if body is not None else {} + self.method = method + self.path_params = path_params if path_params is not None else {} + self.query_params = query_params if query_params is not None else {} + self.headers = headers if headers is not None else {} \ No newline at end of file diff --git a/skyflow/vault/connection/_invoke_connection_response.py b/skyflow/vault/connection/_invoke_connection_response.py new file mode 100644 index 00000000..661b61d3 --- /dev/null +++ b/skyflow/vault/connection/_invoke_connection_response.py @@ -0,0 +1,9 @@ +class InvokeConnectionResponse: + def __init__(self, response = None): + self.response = response + + def __repr__(self): + return f"ConnectionResponse({self.response})" + + def __str__(self): + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/controller/__init__.py b/skyflow/vault/controller/__init__.py new file mode 100644 index 00000000..14301fb3 --- /dev/null +++ b/skyflow/vault/controller/__init__.py @@ -0,0 +1,2 @@ +from ._vault import Vault +from ._connections import Connection \ No newline at end of file diff --git a/skyflow/vault/controller/_audit.py b/skyflow/vault/controller/_audit.py new file mode 100644 index 00000000..96aab2fa --- /dev/null +++ b/skyflow/vault/controller/_audit.py @@ -0,0 +1,8 @@ +class Audit: + #members + def __init__(self): + pass + + def list(self): + pass + \ No newline at end of file diff --git a/skyflow/vault/controller/_bin_look_up.py b/skyflow/vault/controller/_bin_look_up.py new file mode 100644 index 00000000..242cb640 --- /dev/null +++ b/skyflow/vault/controller/_bin_look_up.py @@ -0,0 +1,7 @@ +class BinLookUp: + #members + def __init__(self): + pass + + def get(self): + pass \ No newline at end of file diff --git 
a/skyflow/vault/controller/_connections.py b/skyflow/vault/controller/_connections.py new file mode 100644 index 00000000..2fc52f11 --- /dev/null +++ b/skyflow/vault/controller/_connections.py @@ -0,0 +1,40 @@ +import json +import requests +from skyflow.error import SkyflowError +from skyflow.utils import construct_invoke_connection_request, SkyflowMessages, get_metrics, \ + parse_invoke_connection_response +from skyflow.utils.logger import log_info +from skyflow.vault.connection import InvokeConnectionRequest + + +class Connection: + def __init__(self, vault_client): + self.__vault_client = vault_client + + def invoke(self, request: InvokeConnectionRequest): + session = requests.Session() + + config = self.__vault_client.get_config() + bearer_token = self.__vault_client.get_bearer_token(config.get("credentials")) + + connection_url = config.get("connection_url") + log_info(SkyflowMessages.Info.VALIDATING_INVOKE_CONNECTION_REQUEST.value, self.__vault_client.get_logger()) + invoke_connection_request = construct_invoke_connection_request(request, connection_url, self.__vault_client.get_logger()) + log_info(SkyflowMessages.Info.INVOKE_CONNECTION_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + + if not 'X-Skyflow-Authorization'.lower() in invoke_connection_request.headers: + invoke_connection_request.headers['x-skyflow-authorization'] = bearer_token + + invoke_connection_request.headers['sky-metadata'] = json.dumps(get_metrics()) + + log_info(SkyflowMessages.Info.INVOKE_CONNECTION_TRIGGERED, self.__vault_client.get_logger()) + + try: + response = session.send(invoke_connection_request) + session.close() + invoke_connection_response = parse_invoke_connection_response(response) + return invoke_connection_response + + except Exception as e: + raise SkyflowError(SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value, + SkyflowMessages.ErrorCodes.SERVER_ERROR.value) \ No newline at end of file diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py new file mode 100644 index 00000000..ee6a4ae5 --- /dev/null +++ b/skyflow/vault/controller/_vault.py @@ -0,0 +1,286 @@ +from skyflow.generated.rest import V1FieldRecords, RecordServiceInsertRecordBody, V1DetokenizeRecordRequest, \ + V1DetokenizePayload, V1TokenizeRecordRequest, V1TokenizePayload, QueryServiceExecuteQueryBody, \ + RecordServiceBulkDeleteRecordBody, RecordServiceUpdateRecordBody, RecordServiceBatchOperationBody, V1BatchRecord, \ + BatchRecordMethod +from skyflow.generated.rest.exceptions import BadRequestException, UnauthorizedException, ForbiddenException +from skyflow.utils import SkyflowMessages, parse_insert_response, \ + handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, \ + parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values +from skyflow.utils.logger import log_info, log_error_log +from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ + validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request +from skyflow.vault.data import InsertRequest, UpdateRequest, DeleteRequest, GetRequest, QueryRequest +from skyflow.vault.tokens import DetokenizeRequest, TokenizeRequest + +class Vault: + def __init__(self, vault_client): + self.__vault_client = vault_client + + def __initialize(self): + self.__vault_client.initialize_client_configuration() + + def __build_bulk_field_records(self, values, tokens=None): + if tokens is 
None: + return [V1FieldRecords(fields=record) for record in values] + else: + bulk_record_list = [] + for i, value in enumerate(values): + token = tokens[i] if tokens is not None and i < len(tokens) else None + bulk_record = V1FieldRecords( + fields=value, + tokens=token + ) + if token is not None: + bulk_record.tokens = token + bulk_record_list.append(bulk_record) + return bulk_record_list + + def __build_batch_field_records(self, values, tokens, table_name, return_tokens, upsert): + batch_record_list = [] + for i, value in enumerate(values): + token = tokens[i] if tokens is not None and i < len(tokens) else None + batch_record = V1BatchRecord( + fields=value, + table_name=table_name, + method=BatchRecordMethod.POST, + tokenization=return_tokens, + upsert=upsert, + tokens=token + ) + if token is not None: + batch_record.tokens = token + batch_record_list.append(batch_record) + return batch_record_list + + def __build_insert_body(self, request: InsertRequest): + if request.continue_on_error: + records_list = self.__build_batch_field_records( + request.values, + request.tokens, + request.table_name, + request.return_tokens, + request.upsert + ) + body = RecordServiceBatchOperationBody( + records=records_list, + continue_on_error=request.continue_on_error, + byot=request.token_mode.value + ) + return body + else: + records_list = self.__build_bulk_field_records(request.values, request.tokens) + return RecordServiceInsertRecordBody( + records=records_list, + tokenization=request.return_tokens, + upsert=request.upsert, + homogeneous=request.homogeneous, + byot=request.token_mode.value + ) + + def insert(self, request: InsertRequest): + log_info(SkyflowMessages.Info.VALIDATE_INSERT_REQUEST.value, self.__vault_client.get_logger()) + validate_insert_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.INSERT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + records_api = self.__vault_client.get_records_api() + insert_body = self.__build_insert_body(request) + + try: + log_info(SkyflowMessages.Info.INSERT_TRIGGERED.value, self.__vault_client.get_logger()) + + if request.continue_on_error: + api_response = records_api.record_service_batch_operation(self.__vault_client.get_vault_id(), + insert_body) + + else: + api_response = records_api.record_service_insert_record(self.__vault_client.get_vault_id(), + request.table_name, insert_body) + + insert_response = parse_insert_response(api_response, request.continue_on_error) + log_info(SkyflowMessages.Info.INSERT_SUCCESS.value, self.__vault_client.get_logger()) + return insert_response + + except BadRequestException as e: + log_error_log(SkyflowMessages.ErrorLogs.INSERT_RECORDS_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def update(self, request: UpdateRequest): + log_info(SkyflowMessages.Info.VALIDATE_UPDATE_REQUEST.value, self.__vault_client.get_logger()) + validate_update_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.UPDATE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + field = {key: value for key, value in request.data.items() if key != "skyflow_id"} + record = V1FieldRecords(fields=field, tokens = request.tokens) + payload = RecordServiceUpdateRecordBody(record=record, 
tokenization=request.return_tokens, byot=request.token_mode.value) + + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.UPDATE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_update_record( + self.__vault_client.get_vault_id(), + request.table, + request.data.get("skyflow_id"), + payload + ) + log_info(SkyflowMessages.Info.UPDATE_SUCCESS.value, self.__vault_client.get_logger()) + update_response = parse_update_record_response(api_response) + return update_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.UPDATE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def delete(self, request: DeleteRequest): + log_info(SkyflowMessages.Info.VALIDATING_DELETE_REQUEST.value, self.__vault_client.get_logger()) + validate_delete_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DELETE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.DELETE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_bulk_delete_record( + self.__vault_client.get_vault_id(), + request.table, + payload + ) + log_info(SkyflowMessages.Info.DELETE_SUCCESS.value, self.__vault_client.get_logger()) + delete_response = parse_delete_response(api_response) + return delete_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def get(self, request: GetRequest): + log_info(SkyflowMessages.Info.VALIDATE_GET_REQUEST.value, self.__vault_client.get_logger()) + validate_get_request(self.__vault_client.get_logger(), request) + if request.column_values: + request.column_values = encode_column_values(request) + log_info(SkyflowMessages.Info.GET_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.GET_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_bulk_get_record( + self.__vault_client.get_vault_id(), + object_name=request.table, + skyflow_ids=request.ids, + redaction = request.redaction_type.value if request.redaction_type is not None else None, + tokenization=request.return_tokens, + fields=request.fields, + offset=request.offset, + limit=request.limit, + download_url=request.download_url, + column_name=request.column_name, + column_values=request.column_values, + ) + log_info(SkyflowMessages.Info.GET_SUCCESS.value, self.__vault_client.get_logger()) + get_response = parse_get_response(api_response) + return get_response + except Exception as e: + 
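+ # Any failure from the generated records API is logged and handed to
+ # handle_exception, which is assumed to translate it into a SkyflowError.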
log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def query(self, request: QueryRequest): + log_info(SkyflowMessages.Info.VALIDATING_QUERY_REQUEST.value, self.__vault_client.get_logger()) + validate_query_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.QUERY_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + payload = QueryServiceExecuteQueryBody(query=request.query) + query_api = self.__vault_client.get_query_api() + try: + log_info(SkyflowMessages.Info.QUERY_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = query_api.query_service_execute_query( + self.__vault_client.get_vault_id(), + payload + ) + log_info(SkyflowMessages.Info.QUERY_SUCCESS.value, self.__vault_client.get_logger()) + query_response = parse_query_response(api_response) + return query_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def detokenize(self, request: DetokenizeRequest): + log_info(SkyflowMessages.Info.VALIDATE_DETOKENIZE_REQUEST.value, self.__vault_client.get_logger()) + validate_detokenize_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DETOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + tokens_list = [ + V1DetokenizeRecordRequest(token=token, redaction=request.redaction_type.value) + for token in request.tokens + ] + payload = V1DetokenizePayload(detokenization_parameters=tokens_list, continue_on_error=request.continue_on_error) + tokens_api = self.__vault_client.get_tokens_api() + try: + log_info(SkyflowMessages.Info.DETOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = tokens_api.record_service_detokenize( + self.__vault_client.get_vault_id(), + detokenize_payload=payload + ) + log_info(SkyflowMessages.Info.DETOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) + detokenize_response = parse_detokenize_response(api_response) + return detokenize_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def tokenize(self, request: TokenizeRequest): + log_info(SkyflowMessages.Info.VALIDATING_TOKENIZE_REQUEST.value, self.__vault_client.get_logger()) + validate_tokenize_request(self.__vault_client.get_logger(), request) + 
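+ # request.values is expected to be a list of dicts with 'value' and 'column_group'
+ # keys, e.g. [{'value': '4111 1111 1111 1111', 'column_group': 'card_numbers'}]
+ # (illustrative values only).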
log_info(SkyflowMessages.Info.TOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + + records_list = [ + V1TokenizeRecordRequest(value=item["value"], column_group=item["column_group"]) + for item in request.values + ] + payload = V1TokenizePayload(tokenization_parameters=records_list) + tokens_api = self.__vault_client.get_tokens_api() + try: + log_info(SkyflowMessages.Info.TOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = tokens_api.record_service_tokenize( + self.__vault_client.get_vault_id(), + tokenize_payload=payload + ) + tokenize_response = parse_tokenize_response(api_response) + log_info(SkyflowMessages.Info.TOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) + return tokenize_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except ForbiddenException as e: + handle_exception(e, self.__vault_client.get_logger()) \ No newline at end of file diff --git a/skyflow/vault/data/__init__.py b/skyflow/vault/data/__init__.py new file mode 100644 index 00000000..b43b23cf --- /dev/null +++ b/skyflow/vault/data/__init__.py @@ -0,0 +1,11 @@ +from ._insert_response import InsertResponse +from ._insert_request import InsertRequest +from ._get_request import GetRequest +from ._get_response import GetResponse +from ._delete_request import DeleteRequest +from ._delete_response import DeleteResponse +from ._update_request import UpdateRequest +from ._update_response import UpdateResponse +from ._upload_file_request import UploadFileRequest +from ._query_request import QueryRequest +from ._query_response import QueryResponse \ No newline at end of file diff --git a/skyflow/vault/data/_delete_request.py b/skyflow/vault/data/_delete_request.py new file mode 100644 index 00000000..9f1f2eaa --- /dev/null +++ b/skyflow/vault/data/_delete_request.py @@ -0,0 +1,4 @@ +class DeleteRequest: + def __init__(self, table, ids): + self.table = table + self.ids = ids diff --git a/skyflow/vault/data/_delete_response.py b/skyflow/vault/data/_delete_response.py new file mode 100644 index 00000000..0147c777 --- /dev/null +++ b/skyflow/vault/data/_delete_response.py @@ -0,0 +1,11 @@ +class DeleteResponse: + def __init__(self, deleted_ids = None, errors = None): + self.deleted_ids = deleted_ids + self.errors = errors + + def __repr__(self): + return f"DeleteResponse(deleted_ids={self.deleted_ids}, errors={self.errors})" + + def __str__(self): + return self.__repr__() + diff --git a/skyflow/vault/data/_get_request.py b/skyflow/vault/data/_get_request.py new file mode 100644 index 00000000..81cb21ad --- /dev/null +++ b/skyflow/vault/data/_get_request.py @@ -0,0 +1,22 @@ +class GetRequest: + def __init__(self, + table, + ids = None, + redaction_type = None, + return_tokens = False, + fields = None, + offset = None, + limit = None, + download_url = None, + column_name = None, + column_values = None): + self.table = table + self.ids = ids + self.redaction_type = redaction_type + self.return_tokens = return_tokens + self.fields = fields + self.offset = offset + self.limit = limit + self.download_url = download_url + self.column_name = column_name + self.column_values = column_values \ No newline 
at end of file diff --git a/skyflow/vault/data/_get_response.py b/skyflow/vault/data/_get_response.py new file mode 100644 index 00000000..cf1b0805 --- /dev/null +++ b/skyflow/vault/data/_get_response.py @@ -0,0 +1,10 @@ +class GetResponse: + def __init__(self, data=None, errors = None): + self.data = data if data else [] + self.errors = errors + + def __repr__(self): + return f"GetResponse(data={self.data}, errors={self.errors})" + + def __str__(self): + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/data/_insert_request.py b/skyflow/vault/data/_insert_request.py new file mode 100644 index 00000000..742c5120 --- /dev/null +++ b/skyflow/vault/data/_insert_request.py @@ -0,0 +1,21 @@ +from skyflow.utils.enums import TokenMode + +class InsertRequest: + def __init__(self, + table_name, + values, + tokens = None, + upsert = None, + homogeneous = False, + token_mode = TokenMode.DISABLE, + return_tokens = True, + continue_on_error = False): + self.table_name = table_name + self.values = values + self.tokens = tokens + self.upsert = upsert + self.homogeneous = homogeneous + self.token_mode = token_mode + self.return_tokens = return_tokens + self.continue_on_error = continue_on_error + diff --git a/skyflow/vault/data/_insert_response.py b/skyflow/vault/data/_insert_response.py new file mode 100644 index 00000000..6407426d --- /dev/null +++ b/skyflow/vault/data/_insert_response.py @@ -0,0 +1,12 @@ +class InsertResponse: + def __init__(self, inserted_fields = None, errors=None): + if errors is None: + errors = list() + self.inserted_fields = inserted_fields + self.errors = errors + + def __repr__(self): + return f"InsertResponse(inserted_fields={self.inserted_fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/data/_query_request.py b/skyflow/vault/data/_query_request.py new file mode 100644 index 00000000..e17cb8d0 --- /dev/null +++ b/skyflow/vault/data/_query_request.py @@ -0,0 +1,3 @@ +class QueryRequest: + def __init__(self, query): + self.query = query diff --git a/skyflow/vault/data/_query_response.py b/skyflow/vault/data/_query_response.py new file mode 100644 index 00000000..e2034758 --- /dev/null +++ b/skyflow/vault/data/_query_response.py @@ -0,0 +1,10 @@ +class QueryResponse: + def __init__(self): + self.fields = [] + self.errors = [] + + def __repr__(self): + return f"QueryResponse(fields={self.fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/data/_update_request.py b/skyflow/vault/data/_update_request.py new file mode 100644 index 00000000..5b73c3ae --- /dev/null +++ b/skyflow/vault/data/_update_request.py @@ -0,0 +1,9 @@ +from skyflow.utils.enums import TokenMode + +class UpdateRequest: + def __init__(self, table, data, tokens = None, return_tokens = False, token_mode = TokenMode.DISABLE): + self.table = table + self.data = data + self.tokens = tokens + self.return_tokens = return_tokens + self.token_mode = token_mode diff --git a/skyflow/vault/data/_update_response.py b/skyflow/vault/data/_update_response.py new file mode 100644 index 00000000..dbbb9cc7 --- /dev/null +++ b/skyflow/vault/data/_update_response.py @@ -0,0 +1,10 @@ +class UpdateResponse: + def __init__(self, updated_field = None, errors=None): + self.updated_field = updated_field + self.errors = errors if errors is not None else [] + + def __repr__(self): + return f"UpdateResponse(updated_field={self.updated_field}, errors={self.errors})" + + def __str__(self): + return 
self.__repr__() diff --git a/skyflow/vault/data/_upload_file_request.py b/skyflow/vault/data/_upload_file_request.py new file mode 100644 index 00000000..b0da1e03 --- /dev/null +++ b/skyflow/vault/data/_upload_file_request.py @@ -0,0 +1,3 @@ +class UploadFileRequest: + def __init__(self): + pass diff --git a/skyflow/vault/tokens/__init__.py b/skyflow/vault/tokens/__init__.py new file mode 100644 index 00000000..ddfe8ad7 --- /dev/null +++ b/skyflow/vault/tokens/__init__.py @@ -0,0 +1,4 @@ +from ._detokenize_request import DetokenizeRequest +from ._detokenize_response import DetokenizeResponse +from ._tokenize_request import TokenizeRequest +from ._tokenize_response import TokenizeResponse \ No newline at end of file diff --git a/skyflow/vault/tokens/_detokenize_request.py b/skyflow/vault/tokens/_detokenize_request.py new file mode 100644 index 00000000..5e3bc041 --- /dev/null +++ b/skyflow/vault/tokens/_detokenize_request.py @@ -0,0 +1,7 @@ +from skyflow.utils.enums.redaction_type import RedactionType + +class DetokenizeRequest: + def __init__(self, tokens, redaction_type = RedactionType.PLAIN_TEXT, continue_on_error = False): + self.tokens = tokens + self.redaction_type = redaction_type + self.continue_on_error = continue_on_error \ No newline at end of file diff --git a/skyflow/vault/tokens/_detokenize_response.py b/skyflow/vault/tokens/_detokenize_response.py new file mode 100644 index 00000000..f42e3917 --- /dev/null +++ b/skyflow/vault/tokens/_detokenize_response.py @@ -0,0 +1,12 @@ +class DetokenizeResponse: + def __init__(self, detokenized_fields = None, errors = None): + self.detokenized_fields = detokenized_fields + self.errors = errors + + def __repr__(self): + return f"DetokenizeResponse(detokenized_fields={self.detokenized_fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() + + diff --git a/skyflow/vault/tokens/_tokenize_request.py b/skyflow/vault/tokens/_tokenize_request.py new file mode 100644 index 00000000..a1e7c2bc --- /dev/null +++ b/skyflow/vault/tokens/_tokenize_request.py @@ -0,0 +1,3 @@ +class TokenizeRequest: + def __init__(self, values): + self.values = values diff --git a/skyflow/vault/tokens/_tokenize_response.py b/skyflow/vault/tokens/_tokenize_response.py new file mode 100644 index 00000000..264b3987 --- /dev/null +++ b/skyflow/vault/tokens/_tokenize_response.py @@ -0,0 +1,11 @@ +class TokenizeResponse: + def __init__(self, tokenized_fields = None): + self.tokenized_fields = tokenized_fields + + + def __repr__(self): + return f"TokenizeResponse(tokenized_fields={self.tokenized_fields})" + + def __str__(self): + return self.__repr__() + diff --git a/skyflow/version.py b/skyflow/version.py deleted file mode 100644 index 4d6b1a07..00000000 --- a/skyflow/version.py +++ /dev/null @@ -1 +0,0 @@ -SDK_VERSION = '1.15.1' \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py index d803f19f..e69de29b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' diff --git a/tests/client/__init__.py b/tests/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/client/test_skyflow.py b/tests/client/test_skyflow.py new file mode 100644 index 00000000..621cdee0 --- /dev/null +++ b/tests/client/test_skyflow.py @@ -0,0 +1,332 @@ +import unittest +from unittest.mock import patch + +from skyflow import LogLevel, Env +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow import Skyflow + +VALID_VAULT_CONFIG = { + "vault_id": "VAULT_ID", + "cluster_id": "CLUSTER_ID", + "env": Env.DEV, + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +INVALID_VAULT_CONFIG = { + "cluster_id": "CLUSTER_ID", # Missing vault_id + "env": Env.DEV, + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +VALID_CONNECTION_CONFIG = { + "connection_id": "CONNECTION_ID", + "connection_url": "https://CONNECTION_URL", + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +INVALID_CONNECTION_CONFIG = { + "connection_url": "https://CONNECTION_URL", + # Missing connection_id + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +VALID_CREDENTIALS = { + "path": "/path/to/valid_credentials.json" +} + +class TestSkyflow(unittest.TestCase): + + def setUp(self): + self.builder = Skyflow.builder() + + def test_add_vault_config_success(self): + builder = self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.assertIn(VALID_VAULT_CONFIG, self.builder._Builder__vault_list) + self.assertEqual(builder, self.builder) + + def test_add_already_exists_vault_config(self): + builder = self.builder.add_vault_config(VALID_VAULT_CONFIG) + with self.assertRaises(SkyflowError) as context: + builder.add_vault_config(VALID_VAULT_CONFIG) + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_ALREADY_EXISTS.value.format(VALID_VAULT_CONFIG.get("vault_id"))) + + + def test_add_vault_config_invalid(self): + with self.assertRaises(SkyflowError) as context: + self.builder.add_vault_config(INVALID_VAULT_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_VAULT_ID.value) + + def test_remove_vault_config_valid(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + result = self.builder.remove_vault_config(VALID_VAULT_CONFIG['vault_id']) + + self.assertNotIn(VALID_VAULT_CONFIG['vault_id'], self.builder._Builder__vault_configs) + + @patch('skyflow.client.skyflow.log_error') + def test_remove_vault_config_invalid(self, mock_log_error): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.remove_vault_config("invalid_id") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_VAULT_ID.value) + + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_update_vault_config_valid(self, mock_validate): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + updated_config = VALID_VAULT_CONFIG.copy() + updated_config["cluster_id"] = "test.cluster" + self.builder.update_vault_config(updated_config) + mock_validate.assert_called_once() + + def test_get_vault(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + + config = self.builder.get_vault_config(VALID_VAULT_CONFIG["vault_id"]) + + self.assertEqual(self.builder._Builder__vault_list[0], VALID_VAULT_CONFIG) + + def test_get_vault_with_vault_id_none(self): + 
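+ # Passing None for the vault_id should fall back to the first configured vault.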
self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + vault = self.builder.get_vault_config(None) + config = vault.get("vault_client").get_config() + self.assertEqual(self.builder._Builder__vault_list[0], config) + + def test_get_vault_with_empty_vault_list_when_vault_id_is_none_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config(None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_VAULT_CONFIGS.value) + + def test_get_vault_with_invalid_vault_id_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config('invalid_id') + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format('invalid_id')) + + def test_get_vault_with_invalid_vault_id_and_non_empty_list_raises_error(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config('invalid_vault_id') + + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format("invalid_vault_id")) + + + @patch('skyflow.client.skyflow.validate_vault_config') + def test_build_calls_validate_vault_config(self, mock_validate_vault_config): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + mock_validate_vault_config.assert_called_once_with(self.builder._Builder__logger, VALID_VAULT_CONFIG) + + def test_get_log_level(self): + builder = self.builder.set_log_level(LogLevel.ERROR) + client = self.builder.build() + self.assertEqual(LogLevel.ERROR, client.get_log_level()) + + def test_add_connection_config_valid(self): + result = self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + + self.assertIn(VALID_CONNECTION_CONFIG, self.builder._Builder__connection_list) + self.assertEqual(result, self.builder) + + def test_add_already_exists_connection_config(self): + connection_id = VALID_CONNECTION_CONFIG.get("connection_id") + builder = self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + + with self.assertRaises(SkyflowError) as context: + builder.add_connection_config(VALID_CONNECTION_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_ALREADY_EXISTS.value.format(connection_id)) + + def test_add_connection_config_invalid(self): + with self.assertRaises(SkyflowError) as context: + self.builder.add_connection_config(INVALID_CONNECTION_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CONNECTION_ID.value) + + def test_remove_connection_config_valid(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + result = self.builder.remove_connection_config(VALID_CONNECTION_CONFIG.get("connection_id")) + + self.assertNotIn(VALID_CONNECTION_CONFIG.get("connection_id"), self.builder._Builder__connection_configs) + + + @patch('skyflow.client.skyflow.log_error') + def test_remove_connection_config_invalid(self, mock_log_error): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.remove_connection_config("invalid_id") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CONNECTION_ID.value) + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_update_connection_config_valid(self, mock_validate): + 
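+ # A valid connection config update should delegate to VaultClient.update_config exactly once.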
self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + updated_config = VALID_CONNECTION_CONFIG.copy() + updated_config["connection_url"] = "test_url" + self.builder.update_connection_config(updated_config) + mock_validate.assert_called_once() + + def test_get_connection_config(self): + connection_id = VALID_CONNECTION_CONFIG.get("connection_id") + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + + connection = self.builder.get_connection_config(connection_id) + config = connection.get("vault_client").get_config() + self.assertEqual(self.builder._Builder__connection_list[0], config) + + def test_get_connection_config_with_connection_id_none(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + self.builder.get_connection_config(None) + self.assertEqual(self.builder._Builder__connection_list[0], VALID_CONNECTION_CONFIG) + + def test_get_connection_with_empty_connection_list_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config('invalid_id') + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format('invalid_id')) + + def test_get_connection_with_invalid_connection_id_raises_error(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config('invalid_connection_id') + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format('invalid_connection_id')) + + def test_get_connection_with_invalid_connection_id_and_empty_list_raises_Error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config(None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CONNECTION_CONFIGS.value) + + @patch('skyflow.client.skyflow.validate_connection_config') + def test_build_calls_validate_connection_config(self, mock_validate): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + mock_validate.assert_called_once_with(self.builder._Builder__logger, VALID_CONNECTION_CONFIG) + + + def test_build_valid(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG).add_connection_config(VALID_CONNECTION_CONFIG) + client = self.builder.build() + self.assertIsInstance(client, Skyflow) + + def test_set_log_level(self): + self.builder.set_log_level(LogLevel.INFO) + self.assertEqual(self.builder._Builder__log_level, LogLevel.INFO) + + def test_invalid_credentials(self): + builder = self.builder.add_skyflow_credentials(VALID_CREDENTIALS) + builder.add_connection_config(VALID_CONNECTION_CONFIG) + builder.add_vault_config(VALID_VAULT_CONFIG) + builder.build() + self.assertEqual(VALID_CREDENTIALS, self.builder._Builder__skyflow_credentials) + self.assertEqual(builder, self.builder) + + @patch('skyflow.client.skyflow.validate_vault_config') + def test_skyflow_client_add_remove_vault_config(self, mock_validate_vault_config): + skyflow_client = self.builder.add_vault_config(VALID_VAULT_CONFIG).build() + new_config = VALID_VAULT_CONFIG.copy() + new_config['vault_id'] = "VAULT_ID" + skyflow_client.add_vault_config(new_config) + + assert mock_validate_vault_config.call_count == 2 + + self.assertEqual("VAULT_ID", + skyflow_client.get_vault_config(new_config['vault_id']).get("vault_id")) + + 
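+ # After removal, looking up the same vault_id should raise VAULT_ID_NOT_IN_CONFIG_LIST.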
skyflow_client.remove_vault_config(new_config['vault_id']) + with self.assertRaises(SkyflowError) as context: + skyflow_client.get_vault_config(new_config['vault_id']).get("vault_id") + + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format( + new_config['vault_id'])) + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_skyflow_client_update_and_get_vault_config(self, mock_update_config): + skyflow_client = self.builder.add_vault_config(VALID_VAULT_CONFIG).build() + new_config = VALID_VAULT_CONFIG.copy() + new_config['env'] = Env.SANDBOX + skyflow_client.update_vault_config(new_config) + mock_update_config.assert_called_once() + + vault = skyflow_client.get_vault_config(VALID_VAULT_CONFIG.get("vault_id")) + + self.assertEqual(VALID_VAULT_CONFIG.get("vault_id"), vault.get("vault_id")) + + @patch('skyflow.client.skyflow.validate_connection_config') + def test_skyflow_client_add_remove_connection_config(self, mock_validate_connection_config): + skyflow_client = self.builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + new_config = VALID_CONNECTION_CONFIG.copy() + new_config['connection_id'] = "CONNECTION_ID" + skyflow_client.add_connection_config(new_config) + + assert mock_validate_connection_config.call_count == 2 + self.assertEqual("CONNECTION_ID", skyflow_client.get_connection_config(new_config['connection_id']).get("connection_id")) + + skyflow_client.remove_connection_config("CONNECTION_ID") + with self.assertRaises(SkyflowError) as context: + skyflow_client.get_connection_config(new_config['connection_id']).get("connection_id") + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format(new_config['connection_id'])) + + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_skyflow_client_update_and_get_connection_config(self, mock_update_config): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + new_config = VALID_CONNECTION_CONFIG.copy() + new_config['connection_url'] = 'updated_url' + skyflow_client.update_connection_config(new_config) + mock_update_config.assert_called_once() + + connection = skyflow_client.get_connection_config(VALID_CONNECTION_CONFIG.get("connection_id")) + + self.assertEqual(VALID_CONNECTION_CONFIG.get("connection_id"), connection.get("connection_id")) + + def test_skyflow_add_and_update_skyflow_credentials(self): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + skyflow_client.add_skyflow_credentials(VALID_CREDENTIALS) + + self.assertEqual(VALID_CREDENTIALS, builder._Builder__skyflow_credentials) + + new_credentials = VALID_CREDENTIALS.copy() + new_credentials['path'] = 'path/to/new_credentials' + + skyflow_client.update_skyflow_credentials(new_credentials) + + self.assertEqual(new_credentials, builder._Builder__skyflow_credentials) + + + def test_skyflow_add_and_update_log_level(self): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + skyflow_client.set_log_level(LogLevel.INFO) + + self.assertEqual(LogLevel.INFO, builder._Builder__log_level) + + skyflow_client.update_log_level(LogLevel.ERROR) + self.assertEqual(LogLevel.ERROR, builder._Builder__log_level) + + + @patch('skyflow.client.Skyflow.Builder.get_vault_config') + def test_skyflow_vault_and_connection_method(self, mock_get_vault_config): + builder = self.builder + 
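+ # vault() with no explicit id should resolve the default config via the patched Builder.get_vault_config.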
skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).add_vault_config(VALID_VAULT_CONFIG).build() + skyflow_client.vault() + skyflow_client.connection() + mock_get_vault_config.assert_called_once() \ No newline at end of file diff --git a/tests/service_account/__init__.py b/tests/service_account/__init__.py index d803f19f..e69de29b 100644 --- a/tests/service_account/__init__.py +++ b/tests/service_account/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' diff --git a/tests/service_account/data/invalidJson.json b/tests/service_account/data/invalidJson.json deleted file mode 100644 index d857aa37..00000000 --- a/tests/service_account/data/invalidJson.json +++ /dev/null @@ -1 +0,0 @@ -{"a"} \ No newline at end of file diff --git a/tests/service_account/data/invalidPrivateKey.json b/tests/service_account/data/invalidPrivateKey.json deleted file mode 100644 index 9c2ff417..00000000 --- a/tests/service_account/data/invalidPrivateKey.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "privateKey": "invalid key", - "clientID": "Some client ID", - "keyID": "Some key ID", - "tokenURI": "https://unknown.org" -} \ No newline at end of file diff --git a/tests/service_account/data/noClientID.json b/tests/service_account/data/noClientID.json deleted file mode 100644 index 10699c21..00000000 --- a/tests/service_account/data/noClientID.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "privateKey": "Some private key" -} \ No newline at end of file diff --git a/tests/service_account/data/noKeyID.json b/tests/service_account/data/noKeyID.json deleted file mode 100644 index 9b93da90..00000000 --- a/tests/service_account/data/noKeyID.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "privateKey": "Some private key", - "clientID": "Some client ID" -} \ No newline at end of file diff --git a/tests/service_account/data/noPrivateKey.json b/tests/service_account/data/noPrivateKey.json deleted file mode 100644 index 9b2280ee..00000000 --- a/tests/service_account/data/noPrivateKey.json +++ /dev/null @@ -1 +0,0 @@ -{"data": "this has no data"} \ No newline at end of file diff --git a/tests/service_account/data/noTokenURI.json b/tests/service_account/data/noTokenURI.json deleted file mode 100644 index f7d86552..00000000 --- a/tests/service_account/data/noTokenURI.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "privateKey": "Some private key", - "clientID": "Some client ID", - "keyID": "Some key ID" -} \ No newline at end of file diff --git a/tests/service_account/invalid_creds.json b/tests/service_account/invalid_creds.json new file mode 100644 index 00000000..ab0c0141 --- /dev/null +++ b/tests/service_account/invalid_creds.json @@ -0,0 +1 @@ +// \ No newline at end of file diff --git a/tests/service_account/test__utils.py b/tests/service_account/test__utils.py new file mode 100644 index 00000000..7ffb36df --- /dev/null +++ b/tests/service_account/test__utils.py @@ -0,0 +1,146 @@ +import unittest +import time +import jwt +import json +from unittest.mock import patch +import os +from skyflow.error import SkyflowError +from skyflow.service_account import is_expired, generate_bearer_token, \ + generate_bearer_token_from_creds +from skyflow.utils import SkyflowMessages +from skyflow.service_account._utils import get_service_account_token, get_signed_jwt, generate_signed_data_tokens, get_signed_data_token_response_object, generate_signed_data_tokens_from_creds + +creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") +with open(creds_path, 'r') as file: + credentials = 
json.load(file) + +VALID_CREDENTIALS_STRING = json.dumps(credentials) + +CREDENTIALS_WITHOUT_CLIENT_ID = { + 'privateKey': 'private_key' +} + +CREDENTIALS_WITHOUT_KEY_ID = { + 'privateKey': 'private_key', + 'clientID': 'client_id' +} + +CREDENTIALS_WITHOUT_TOKEN_URI = { + 'privateKey': 'private_key', + 'clientID': 'client_id', + 'keyID': 'key_id' +} + +VALID_SERVICE_ACCOUNT_CREDS = credentials + +class TestServiceAccountUtils(unittest.TestCase): + def test_is_expired_empty_token(self): + self.assertTrue(is_expired("")) + + def test_is_expired_non_expired_token(self): + future_time = time.time() + 1000 + token = jwt.encode({"exp": future_time}, key="test", algorithm="HS256") + self.assertFalse(is_expired(token)) + + def test_is_expired_expired_token(self): + past_time = time.time() - 1000 + token = jwt.encode({"exp": past_time}, key="test", algorithm="HS256") + self.assertTrue(is_expired(token)) + + @patch("skyflow.utils.logger._log_helpers.log_error_log") + @patch("jwt.decode", side_effect=Exception("Some error")) + def test_is_expired_general_exception(self, mock_jwt_decode, mock_log_error): + token = jwt.encode({"exp": time.time() + 1000}, key="test", algorithm="HS256") + self.assertTrue(is_expired(token)) + + @patch("builtins.open", side_effect=FileNotFoundError) + def test_generate_bearer_token_invalid_file_path(self, mock_open): + with self.assertRaises(SkyflowError) as context: + generate_bearer_token("invalid_path") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value) + + @patch("json.load", side_effect=json.JSONDecodeError("Expecting value", "", 0)) + def test_generate_bearer_token_invalid_json(self, mock_json_load): + creds_path = os.path.join(os.path.dirname(__file__), "invalid_creds.json") + with self.assertRaises(SkyflowError) as context: + generate_bearer_token(creds_path) + self.assertEqual(context.exception.message, SkyflowMessages.Error.FILE_INVALID_JSON.value.format(creds_path)) + + @patch("skyflow.service_account._utils.get_service_account_token") + def test_generate_bearer_token_valid_file_path(self, mock_generate_bearer_token): + creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") + generate_bearer_token(creds_path) + mock_generate_bearer_token.assert_called_once() + + @patch("skyflow.service_account._utils.get_service_account_token") + def test_generate_bearer_token_from_creds_with_valid_json_string(self, mock_generate_bearer_token): + generate_bearer_token_from_creds(VALID_CREDENTIALS_STRING) + mock_generate_bearer_token.assert_called_once() + + def test_generate_bearer_token_from_creds_invalid_json(self): + with self.assertRaises(SkyflowError) as context: + generate_bearer_token_from_creds("invalid_json") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value) + + def test_get_service_account_token_missing_private_key(self): + incomplete_credentials = {} + with self.assertRaises(SkyflowError) as context: + get_service_account_token(incomplete_credentials, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_PRIVATE_KEY.value) + + def test_get_service_account_token_missing_client_id_key(self): + with self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_CLIENT_ID, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_CLIENT_ID.value) + + def test_get_service_account_token_missing_key_id_key(self): + with 
self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_KEY_ID, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_KEY_ID.value) + + def test_get_service_account_token_missing_token_uri_key(self): + with self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_TOKEN_URI, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_TOKEN_URI.value) + + def test_get_service_account_token_with_valid_credentials(self): + access_token, _ = get_service_account_token(VALID_SERVICE_ACCOUNT_CREDS, {}, None) + self.assertTrue(access_token) + + + @patch("jwt.encode", side_effect=Exception) + def test_get_signed_jwt_invalid_format(self, mock_jwt_encode): + with self.assertRaises(SkyflowError) as context: + get_signed_jwt({}, "client_id", "key_id", "token_uri", "private_key", None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.JWT_INVALID_FORMAT.value) + + def test_get_signed_data_token_response_object(self): + token = "sample_token" + signed_token = "signed_sample_token" + response = get_signed_data_token_response_object(signed_token, token) + self.assertEqual(response[0], token) + self.assertEqual(response[1], signed_token) + + def test_generate_signed_data_tokens_from_file_path(self): + creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") + options = {"data_tokens": ["token1", "token2"], "ctx": 'ctx'} + result = generate_signed_data_tokens(creds_path, options) + self.assertEqual(len(result), 2) + + def test_generate_signed_data_tokens_from_invalid_file_path(self): + options = {"data_tokens": ["token1", "token2"]} + with self.assertRaises(SkyflowError) as context: + result = generate_signed_data_tokens('credentials1.json', options) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value) + + def test_generate_signed_data_tokens_from_creds(self): + options = {"data_tokens": ["token1", "token2"]} + result = generate_signed_data_tokens_from_creds(VALID_CREDENTIALS_STRING, options) + self.assertEqual(len(result), 2) + + def test_generate_signed_data_tokens_from_creds_with_invalid_string(self): + options = {"data_tokens": ["token1", "token2"]} + credentials_string = '{' + with self.assertRaises(SkyflowError) as context: + result = generate_signed_data_tokens_from_creds(credentials_string, options) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value) \ No newline at end of file diff --git a/tests/service_account/test_generate_bearer_token.py b/tests/service_account/test_generate_bearer_token.py deleted file mode 100644 index 586db1ca..00000000 --- a/tests/service_account/test_generate_bearer_token.py +++ /dev/null @@ -1,171 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os -from dotenv import dotenv_values -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, generate_bearer_token -from skyflow.errors._skyflow_errors import * -import json -from skyflow.service_account._token import getSignedJWT, getResponseToken, sendRequestWithToken - - -class TestGenerateBearerToken(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join( - os.getcwd(), 'tests/service_account/data/') - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testWithInvalidFilePath(self): - try: - generate_bearer_token('unknownfilepath') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.FILE_NOT_FOUND.value % ('unknownfilepath')) - - def testInvalidJSON(self): - path = self.getDataPath('empty') - try: - generate_bearer_token(path) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.FILE_INVALID_JSON.value % (path)) - - def testWithNoPrivateKey(self): - try: - generate_bearer_token(self.getDataPath('noPrivateKey')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testWithNoClientID(self): - try: - generate_bearer_token(self.getDataPath('noClientID')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_CLIENT_ID.value) - - def testWithNoKeyID(self): - try: - generate_bearer_token(self.getDataPath('noKeyID')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_KEY_ID.value) - - def testWithNoTokenURI(self): - try: - generate_bearer_token(self.getDataPath('noTokenURI')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_TOKEN_URI.value) - - def testInvalidCreds(self): - try: - generate_bearer_token(self.getDataPath('invalidPrivateKey')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.JWT_INVALID_FORMAT.value) - - def testGenerateBearerTokenFromCredsInvalid(self): - creds_file = open(self.getDataPath('invalidPrivateKey'), 'r') - credentialsString = json.dumps(creds_file.read()) - creds_file.close() - try: - generate_bearer_token_from_creds(credentialsString) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testGenerateBearerTokenFromCredsFail(self): - env_values = dotenv_values('.env') - credentials_path = env_values['CREDENTIALS_FILE_PATH'] - creds_file = open(credentials_path, 'r') - credentialsString = json.dumps(creds_file.read()) - try: - generate_bearer_token_from_creds(credentialsString) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testNonExistentFileArg(self): - try: - generate_bearer_token('non-existent-file.json') - 
self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FILE_NOT_FOUND.value % 'non-existent-file.json') - - def testInvalidJSONInCreds(self): - filepath = self.getDataPath('invalidJson') - try: - generate_bearer_token(filepath) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FILE_INVALID_JSON.value % filepath) - try: - generate_bearer_token_from_creds(self.getDataPath('invalid-json')) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_CREDENTIALS.value) - - def testGenerateToken(self): - try: - generate_bearer_token(self.getDataPath('invalid-json')) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - - def testGetSignedJWTInvalidValue(self): - try: - getSignedJWT('{}clientID', 'keyId', - 'privateKey', 'ww.tokenURI.com') - self.fail('invalid jwt signed') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.JWT_INVALID_FORMAT.value) - - def testGetResponseTokenNoType(self): - try: - getResponseToken({'accessToken': 'only access token'}) - self.fail('Should throw') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.SERVER_ERROR.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_TOKEN_TYPE.value) - - def testGetResponseTokenNoAccessToken(self): - try: - getResponseToken({'tokenType': 'only token type'}) - self.fail('Should throw') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.SERVER_ERROR.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_ACCESS_TOKEN.value) - - def testSendRequestInvalidUrl(self): - try: - sendRequestWithToken('invalidurl', 'invalid-token') - self.fail('Not throwing on invalid url') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.INVALID_URL.value % 'invalidurl') diff --git a/tests/service_account/test_sa_token_utils.py b/tests/service_account/test_sa_token_utils.py deleted file mode 100644 index 737439f2..00000000 --- a/tests/service_account/test_sa_token_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import os -import unittest -from dotenv import dotenv_values - -from skyflow.service_account._token import * -from skyflow.service_account import is_expired - - -class TestGenerateBearerToken(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join( - os.getcwd(), 'tests/service_account/data/') - return super().setUp() - - def testIsExpiredInvalidToken(self): - try: - token = 'invalid token' - self.assertEqual(True, is_expired(token)) - except SkyflowError as se: - self.fail('raised exception for invalid token') - - def testIsExpiredEmptyToken(self): - try: - self.assertEqual(True, is_expired('')) - except SkyflowError as se: - self.fail('Error '+str(se.message)) - - def testIsExpiredTokenExpred(self): - expiredToken = 'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJodHRwczovL21hbmFnZS5za3lmbG93YXBpcy5jb20iLCJjbGkiOiJrOWZkN2ZiMzcyMDI0NDhiYmViOGNkNmUyYzQ4NTdkOSIsImV4cCI6MTY0NzI1NjM3NCwiaWF0IjoxNjQ3MjU2MzE1LCJpc3MiOiJzYS1hdXRoQG1hbmFnZS5za3lmbG93YXBpcy5jb20iLCJqdGkiOiJnYTMyZWJhMGJlMzQ0YWRmYjQxMzRjN2Y2ZTIzZjllMCIsInNjcCI6WyJyb2xlOnM1OTdjNzNjYjhjOTRlMjk4YzhlZjZjNzE0M2U0OWMyIl0sInN1YiI6InRlc3Qgc3ZjIGFjYyJ9.OrkSyNtXOVtfL3JNYaArlmUFg0txJFV6o3SE_wadPwZ_h1BtMuoKPo1LOAe-4HhS16i34HcfTTiHmg2ksx5KbD_sdx1intaDWZGXs-6TPvDK8mdFrBblp3nP1y1O_PHEnCMmPD3haZVMj_9jyTKPb6R8qBbMjr-UzXAUCCTiq9XqEd81wY8FsZeKwSQFqbdFdECaPsk8m-k8s7BKc_VLtHXdYXp4vNgjgleSeX4nHHhU1w0y18q2_tPwgLG-MZ2I7pF60Owk9T7f7gSuCpVfa6zYvpYiYFjQayFmYc6tJgEuOyGD_VFKKUUW4TszeNyJOCF15dPDO2JIeGh3xDJ8PA' - try: - self.assertEqual(True, is_expired(expiredToken)) - except SkyflowError: - self.fail('raised error for expired token') diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils/logger/__init__.py b/tests/utils/logger/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils/logger/test__log_helpers.py b/tests/utils/logger/test__log_helpers.py new file mode 100644 index 00000000..1ea50d45 --- /dev/null +++ b/tests/utils/logger/test__log_helpers.py @@ -0,0 +1,86 @@ +import unittest +from unittest.mock import Mock, patch + +from skyflow import LogLevel +from skyflow.utils.logger import log_info, log_error + + +class TestLoggingFunctions(unittest.TestCase): + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_info_with_logger(self, MockLogger): + mock_logger = MockLogger() + message = "Info message" + interface = "InterfaceA" + + log_info(message, mock_logger) + + mock_logger.info.assert_called_once_with(f"{message}") + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_info_without_logger(self, MockLogger): + try: + log_info("Message", None) + except AttributeError: + self.fail("log_info raised AttributeError unexpectedly!") + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_with_all_fields(self, MockLogger): + mock_logger = MockLogger() + message = "Error message" + http_code = 404 + grpc_code = 5 + http_status = "Not Found" + request_id = "12345" + details = {"info": "Detailed error information"} + + log_error(message, http_code, request_id, grpc_code, http_status, details, mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message, + 'grpc_code': grpc_code, + 'http_status': http_status, + 'request_id': request_id, + 'details': details + } + + mock_logger.error.assert_called_once_with(expected_log_data) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_with_minimal_fields(self, MockLogger): + mock_logger = 
MockLogger() + message = "Minimal error" + http_code = 400 + + log_error(message, http_code, logger=mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message + } + + mock_logger.error.assert_called_once_with(expected_log_data) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_creates_logger_if_none(self, MockLogger): + message = "Auto-created logger error" + http_code = 500 + + log_error(message, http_code) + + MockLogger.assert_called_once_with(LogLevel.ERROR) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_handles_missing_optional_fields(self, MockLogger): + mock_logger = MockLogger() + message = "Test missing optional fields" + http_code = 503 + + log_error(message, http_code, logger=mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message + } + mock_logger.error.assert_called_once_with(expected_log_data) diff --git a/tests/utils/logger/test__logger.py b/tests/utils/logger/test__logger.py new file mode 100644 index 00000000..cdfcf13f --- /dev/null +++ b/tests/utils/logger/test__logger.py @@ -0,0 +1,101 @@ +import unittest +from unittest.mock import patch, Mock +import logging +from skyflow import LogLevel +from skyflow.utils.logger import Logger + + +class TestLogger(unittest.TestCase): + + @patch('logging.getLogger') + def test_logger_initialization_with_default_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger() + + self.assertEqual(logger.current_level, LogLevel.ERROR) + mock_logger_instance.setLevel.assert_called_once_with(logging.ERROR) + + @patch('logging.getLogger') + def test_logger_initialization_with_custom_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.INFO) + + self.assertEqual(logger.current_level, LogLevel.INFO) + mock_logger_instance.setLevel.assert_called_once_with(logging.INFO) + + @patch('logging.getLogger') + def test_set_log_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger() + logger.set_log_level(LogLevel.DEBUG) + + self.assertEqual(logger.current_level, LogLevel.DEBUG) + mock_logger_instance.setLevel.assert_called_with(logging.DEBUG) + + @patch('logging.getLogger') + def test_debug_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.DEBUG) + logger.debug("Debug message") + + mock_logger_instance.debug.assert_called_once_with("Debug message") + + @patch('logging.getLogger') + def test_info_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.INFO) + logger.info("Info message") + + mock_logger_instance.info.assert_called_once_with("Info message") + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_warn_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.WARN) + logger.warn("Warn message") + + mock_logger_instance.warning.assert_called_once_with("Warn message") + mock_logger_instance.info.assert_not_called() + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_error_logging(self, mock_get_logger): + mock_logger_instance = Mock() + 
mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.ERROR) + logger.error("Error message") + + mock_logger_instance.error.assert_called_once_with("Error message") + mock_logger_instance.warning.assert_not_called() + mock_logger_instance.info.assert_not_called() + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_logging_with_level_off(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.OFF) + logger.debug("Debug message") + logger.info("Info message") + logger.warn("Warn message") + logger.error("Error message") + + mock_logger_instance.debug.assert_not_called() + mock_logger_instance.info.assert_not_called() + mock_logger_instance.warning.assert_not_called() + mock_logger_instance.error.assert_not_called() \ No newline at end of file diff --git a/tests/utils/test__helpers.py b/tests/utils/test__helpers.py new file mode 100644 index 00000000..8b55abf3 --- /dev/null +++ b/tests/utils/test__helpers.py @@ -0,0 +1,38 @@ +import unittest +from skyflow.utils import get_base_url, format_scope + +VALID_URL = "https://example.com/path?query=1" +BASE_URL = "https://example.com" +EMPTY_URL = "" +INVALID_URL = "invalid-url" +SCOPES_LIST = ["admin", "user", "viewer"] +FORMATTED_SCOPES = "role:admin role:user role:viewer" + +class TestHelperFunctions(unittest.TestCase): + def test_get_base_url_valid_url(self): + self.assertEqual(get_base_url(VALID_URL), BASE_URL) + + def test_get_base_url_empty_url(self): + self.assertEqual(get_base_url(EMPTY_URL), "://") + + def test_get_base_url_invalid_url(self): + self.assertEqual(get_base_url(INVALID_URL), "://") + + def test_format_scope_valid_scopes(self): + self.assertEqual(format_scope(SCOPES_LIST), FORMATTED_SCOPES) + + def test_format_scope_empty_list(self): + self.assertIsNone(format_scope([])) + + def test_format_scope_none(self): + self.assertIsNone(format_scope(None)) + + def test_format_scope_single_scope(self): + single_scope = ["admin"] + expected_result = "role:admin" + self.assertEqual(format_scope(single_scope), expected_result) + + def test_format_scope_special_characters(self): + scopes_with_special_chars = ["admin", "user:write", "read-only"] + expected_result = "role:admin role:user:write role:read-only" + self.assertEqual(format_scope(scopes_with_special_chars), expected_result) \ No newline at end of file diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py new file mode 100644 index 00000000..c9010c98 --- /dev/null +++ b/tests/utils/test__utils.py @@ -0,0 +1,417 @@ +import unittest +from unittest.mock import patch, Mock +import os +import json +from unittest.mock import MagicMock +from urllib.parse import quote +from requests import PreparedRequest +from requests.models import HTTPError +from skyflow.error import SkyflowError +from skyflow.utils import get_credentials, SkyflowMessages, get_vault_url, construct_invoke_connection_request, \ + parse_insert_response, parse_update_record_response, parse_delete_response, parse_get_response, \ + parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_invoke_connection_response, \ + handle_exception, validate_api_key, encode_column_values +from skyflow.utils._utils import parse_path_params, to_lowercase_keys, get_metrics +from skyflow.utils.enums import EnvUrls, Env, ContentType +from skyflow.vault.connection import InvokeConnectionResponse +from skyflow.vault.data import InsertResponse, DeleteResponse, 
GetResponse, QueryResponse +from skyflow.vault.tokens import DetokenizeResponse, TokenizeResponse + +creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") +with open(creds_path, 'r') as file: + credentials = json.load(file) + +TEST_ERROR_MESSAGE = "Test error message." +VALID_ENV_CREDENTIALS = credentials + +class TestUtils(unittest.TestCase): + + @patch.dict(os.environ, {"SKYFLOW_CREDENTIALS": json.dumps(VALID_ENV_CREDENTIALS)}) + def test_get_credentials_env_variable(self): + credentials = get_credentials() + credentials_string = credentials.get('credentials_string') + self.assertEqual(credentials_string, json.dumps(VALID_ENV_CREDENTIALS).replace('\n', '\\n')) + + def test_get_credentials_with_config_level_creds(self): + test_creds = {"authToken": "test_token"} + creds = get_credentials(config_level_creds=test_creds) + self.assertEqual(creds, test_creds) + + def test_get_credentials_with_common_creds(self): + test_creds = {"authToken": "test_token"} + creds = get_credentials(common_skyflow_creds=test_creds) + self.assertEqual(creds, test_creds) + + def test_get_vault_url_valid(self): + valid_cluster_id = "testCluster" + valid_env = Env.DEV + valid_vault_id = "vault123" + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + expected_url = f"https://{valid_cluster_id}.vault.skyflowapis.dev" + self.assertEqual(url, expected_url) + + def test_get_vault_url_with_invalid_cluster_id(self): + valid_cluster_id = "" + valid_env = Env.DEV + valid_vault_id = "vault123" + with self.assertRaises(SkyflowError) as context: + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(valid_vault_id)) + + def test_get_vault_url_with_invalid_env(self): + valid_cluster_id = "cluster_id" + valid_env =EnvUrls.DEV + valid_vault_id = "vault123" + with self.assertRaises(SkyflowError) as context: + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_ENV.value.format(valid_vault_id)) + + def test_parse_path_params(self): + url = "https://example.com/{param1}/{param2}" + path_params = {"param1": "value1", "param2": "value2"} + parsed_url = parse_path_params(url, path_params) + self.assertEqual(parsed_url, "https://example.com/value1/value2") + + def test_to_lowercase_keys(self): + input_dict = {"Key1": "value1", "KEY2": "value2"} + expected_output = {"key1": "value1", "key2": "value2"} + self.assertEqual(to_lowercase_keys(input_dict), expected_output) + + def test_get_metrics(self): + metrics = get_metrics() + self.assertIn('sdk_name_version', metrics) + self.assertIn('sdk_client_device_model', metrics) + self.assertIn('sdk_client_os_details', metrics) + self.assertIn('sdk_runtime_details', metrics) + + + def test_construct_invoke_connection_request_valid(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.JSON.value} + mock_connection_request.body = {"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + expected_url = parse_path_params(connection_url, 
mock_connection_request.path_params) + "?query=test" + self.assertEqual(result.url, expected_url) + + self.assertEqual(result.method, "POST") + self.assertEqual(result.headers['Content-Type'], ContentType.JSON.value) + + self.assertEqual(result.body, json.dumps(mock_connection_request.body)) + + def test_construct_invoke_connection_request_with_invalid_headers(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = [] + mock_connection_request.body = {"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + with self.assertRaises(SkyflowError) as context: + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value) + + def test_construct_invoke_connection_request_with_invalid_request_method(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.JSON.value} + mock_connection_request.body = {"key": "value"} + mock_connection_request.method = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + with self.assertRaises(SkyflowError) as context: + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_METHOD.value) + + def test_construct_invoke_connection_request_with_invalid_request_body(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.JSON.value} + mock_connection_request.body = [] + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + with self.assertRaises(SkyflowError) as context: + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_BODY.value) + + def test_construct_invoke_connection_request_with_url_encoded_content_type(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.URLENCODED.value} + mock_connection_request.body = {"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + def test_construct_invoke_connection_request_with_form_date_content_type(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.FORMDATA.value} + mock_connection_request.body = { + "name": (None, "John Doe") + } + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result 
= construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + def test_parse_insert_response(self): + api_response = Mock() + api_response.responses = [ + {"Status": 200, "Body": {"records": [{"skyflow_id": "id1"}]}}, + {"Status": 400, "Body": {"error": TEST_ERROR_MESSAGE}} + ] + result = parse_insert_response(api_response, continue_on_error=True) + self.assertEqual(len(result.inserted_fields), 1) + self.assertEqual(len(result.errors), 1) + + def test_parse_insert_response_continue_on_error_false(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(skyflow_id="id_1", tokens={"token1": "token_value1"}), + Mock(skyflow_id="id_2", tokens={"token2": "token_value2"}) + ] + + result = parse_insert_response(mock_api_response, continue_on_error=False) + + self.assertIsInstance(result, InsertResponse) + + expected_inserted_fields = [ + {"skyflow_id": "id_1", "token1": "token_value1"}, + {"skyflow_id": "id_2", "token2": "token_value2"} + ] + self.assertEqual(result.inserted_fields, expected_inserted_fields) + + self.assertEqual(result.errors, []) + + def test_parse_update_record_response(self): + api_response = Mock() + api_response.skyflow_id = "id1" + api_response.tokens = {"token1": "value1"} + result = parse_update_record_response(api_response) + self.assertEqual(result.updated_field['skyflow_id'], "id1") + self.assertEqual(result.updated_field['token1'], "value1") + + def test_parse_delete_response_successful(self): + mock_api_response = Mock() + mock_api_response.record_id_response = ["id_1", "id_2", "id_3"] + + result = parse_delete_response(mock_api_response) + + self.assertIsInstance(result, DeleteResponse) + + expected_deleted_ids = ["id_1", "id_2", "id_3"] + self.assertEqual(result.deleted_ids, expected_deleted_ids) + + self.assertEqual(result.errors, []) + + def test_parse_get_response_successful(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={'field1': 'value1', 'field2': 'value2'}), + Mock(fields={'field1': 'value3', 'field2': 'value4'}) + ] + + result = parse_get_response(mock_api_response) + + self.assertIsInstance(result, GetResponse) + + expected_data = [ + {'field1': 'value1', 'field2': 'value2'}, + {'field1': 'value3', 'field2': 'value4'} + ] + self.assertEqual(result.data, expected_data) + + self.assertEqual(result.errors, []) + + def test_parse_detokenize_response_with_mixed_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1", value="value1", value_type=Mock(value="Type1"), error=None), + Mock(token="token2", value=None, value_type=None, error="Some error"), + Mock(token="token3", value="value3", value_type=Mock(value="Type2"), error=None), + ] + + result = parse_detokenize_response(mock_api_response) + self.assertIsInstance(result, DetokenizeResponse) + + expected_detokenized_fields = [ + {"token": "token1", "value": "value1", "type": "Type1"}, + {"token": "token3", "value": "value3", "type": "Type2"} + ] + + expected_errors = [ + {"token": "token2", "error": "Some error"} + ] + + self.assertEqual(result.detokenized_fields, expected_detokenized_fields) + self.assertEqual(result.errors, expected_errors) + + def test_parse_tokenize_response_with_valid_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1"), + Mock(token="token2"), + Mock(token="token3"), + ] + + result = parse_tokenize_response(mock_api_response) + self.assertIsInstance(result, 
TokenizeResponse) + + expected_tokenized_fields = [ + {"token": "token1"}, + {"token": "token2"}, + {"token": "token3"} + ] + + self.assertEqual(result.tokenized_fields, expected_tokenized_fields) + + def test_parse_query_response_with_valid_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + result = parse_query_response(mock_api_response) + + self.assertIsInstance(result, QueryResponse) + + expected_fields = [ + {"field1": "value1", "field2": "value2", "tokenized_data": {}}, + {"field1": "value3", "field2": "value4", "tokenized_data": {}} + ] + + self.assertEqual(result.fields, expected_fields) + + @patch("requests.Response") + def test_parse_invoke_connection_response_successful(self, mock_response): + mock_response.status_code = 200 + mock_response.content = json.dumps({"key": "value"}).encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + result = parse_invoke_connection_response(mock_response) + + self.assertIsInstance(result, InvokeConnectionResponse) + self.assertEqual(result.response["key"], "value") + self.assertEqual(result.response["request_id"], "1234") + + @patch("requests.Response") + def test_parse_invoke_connection_response_json_decode_error(self, mock_response): + + mock_response.status_code = 200 + mock_response.content = "Non-JSON Content".encode('utf-8') + + with self.assertRaises(SkyflowError) as context: + parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Non-JSON Content")) + + @patch("requests.Response") + def test_parse_invoke_connection_response_http_error_with_json_error_message(self, mock_response): + mock_response.status_code = 404 + mock_response.content = json.dumps({"error": {"message": "Not Found"}}).encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + mock_response.raise_for_status.side_effect = HTTPError("404 Error") + + with self.assertRaises(SkyflowError) as context: + parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, "Not Found - request id: 1234") + + @patch("requests.Response") + def test_parse_invoke_connection_response_http_error_without_json_error_message(self, mock_response): + mock_response.status_code = 500 + mock_response.content = "Internal Server Error".encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + mock_response.raise_for_status.side_effect = HTTPError("500 Error") + + with self.assertRaises(SkyflowError) as context: + parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Internal Server Error") + " - request id: 1234") + + @patch("skyflow.utils._utils.log_and_reject_error") + def test_handle_exception_json_error(self, mock_log_and_reject_error): + + mock_error = Mock() + mock_error.headers = { + 'x-request-id': '1234', + 'content-type': 'application/json' + } + mock_error.body = json.dumps({ + "error": { + "message": "JSON error occurred.", + "http_code": 400, + "http_status": "Bad Request", + "grpc_code": "8", + "details": "Detailed message" + } + }).encode('utf-8') + mock_logger = Mock() + + handle_exception(mock_error, mock_logger) + + mock_log_and_reject_error.assert_called_once_with( + "JSON error occurred.", + 400, + "1234", + "Bad Request", + "8", + "Detailed message", + logger=mock_logger + ) 
+ + def test_validate_api_key_valid_key(self): + valid_key = "sky-ABCDE-1234567890abcdef1234567890abcdef" + self.assertTrue(validate_api_key(valid_key)) + + def test_validate_api_key_invalid_length(self): + invalid_key = "sky-ABCDE-123" + self.assertFalse(validate_api_key(invalid_key)) + + def test_validate_api_key_invalid_pattern(self): + invalid_key = "sky-ABCDE-1234567890GHIJKL7890abcdef" + self.assertFalse(validate_api_key(invalid_key)) + + def test_encode_column_values(self): + get_request = MagicMock() + get_request.column_values = ["Hello World!", "foo/bar", "key=value", "email@example.com"] + + expected_encoded_values = [ + quote("Hello World!"), + quote("foo/bar"), + quote("key=value"), + quote("email@example.com"), + ] + + result = encode_column_values(get_request) + self.assertEqual(result, expected_encoded_values) diff --git a/tests/vault/__init__.py b/tests/vault/__init__.py index d803f19f..e69de29b 100644 --- a/tests/vault/__init__.py +++ b/tests/vault/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' diff --git a/tests/vault/client/__init__.py b/tests/vault/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/vault/client/test__client.py b/tests/vault/client/test__client.py new file mode 100644 index 00000000..cc2e2d42 --- /dev/null +++ b/tests/vault/client/test__client.py @@ -0,0 +1,105 @@ +import unittest +from unittest.mock import patch, MagicMock +from skyflow.generated.rest import Configuration +from skyflow.vault.client.client import VaultClient + +CONFIG = { + "credentials": "some_credentials", + "cluster_id": "test_cluster_id", + "env": "test_env", + "vault_id": "test_vault_id", + "roles": ["role_id_1", "role_id_2"], + "ctx": "context" +} + +CREDENTIALS_WITH_API_KEY = {"api_key": "dummy_api_key"} + +class TestVaultClient(unittest.TestCase): + def setUp(self): + self.vault_client = VaultClient(CONFIG) + + def test_set_common_skyflow_credentials(self): + credentials = {"api_key": "dummy_api_key"} + self.vault_client.set_common_skyflow_credentials(credentials) + self.assertEqual(self.vault_client.get_common_skyflow_credentials(), credentials) + + def test_set_logger(self): + mock_logger = MagicMock() + self.vault_client.set_logger("INFO", mock_logger) + self.assertEqual(self.vault_client.get_log_level(), "INFO") + self.assertEqual(self.vault_client.get_logger(), mock_logger) + + @patch("skyflow.vault.client.client.get_credentials") + @patch("skyflow.vault.client.client.get_vault_url") + @patch("skyflow.vault.client.client.Configuration") + @patch("skyflow.vault.client.client.VaultClient.initialize_api_client") + def test_initialize_client_configuration(self, mock_init_api_client, mock_config, mock_get_vault_url, + mock_get_credentials): + mock_get_credentials.return_value = (CREDENTIALS_WITH_API_KEY) + mock_get_vault_url.return_value = "https://test-vault-url.com" + + self.vault_client.initialize_client_configuration() + + mock_get_credentials.assert_called_once_with(CONFIG["credentials"], None, logger=None) + mock_get_vault_url.assert_called_once_with(CONFIG["cluster_id"], CONFIG["env"], CONFIG["vault_id"], logger=None) + mock_config.assert_called_once_with(host="https://test-vault-url.com", access_token="dummy_api_key") + mock_init_api_client.assert_called_once() + + @patch("skyflow.vault.client.client.ApiClient") + def test_initialize_api_client(self, mock_api_client): + config = Configuration() + self.vault_client.initialize_api_client(config) + mock_api_client.assert_called_once_with(config) + + 
@patch("skyflow.vault.client.client.RecordsApi") + def test_get_records_api(self, mock_records_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_records_api() + mock_records_api.assert_called_once() + + @patch("skyflow.vault.client.client.TokensApi") + def test_get_tokens_api(self, mock_tokens_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_tokens_api() + mock_tokens_api.assert_called_once() + + @patch("skyflow.vault.client.client.QueryApi") + def test_get_query_api(self, mock_query_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_query_api() + mock_query_api.assert_called_once() + + def test_get_vault_id(self): + self.assertEqual(self.vault_client.get_vault_id(), CONFIG["vault_id"]) + + @patch("skyflow.vault.client.client.generate_bearer_token") + @patch("skyflow.vault.client.client.generate_bearer_token_from_creds") + @patch("skyflow.vault.client.client.log_info") + def test_get_bearer_token_with_api_key(self, mock_log_info, mock_generate_bearer_token, + mock_generate_bearer_token_from_creds): + token = self.vault_client.get_bearer_token(CREDENTIALS_WITH_API_KEY) + self.assertEqual(token, CREDENTIALS_WITH_API_KEY["api_key"]) + + def test_update_config(self): + new_config = {"credentials": "new_credentials"} + self.vault_client.update_config(new_config) + self.assertTrue(self.vault_client._VaultClient__is_config_updated) + self.assertEqual(self.vault_client.get_config()["credentials"], "new_credentials") + + def test_get_config(self): + self.assertEqual(self.vault_client.get_config(), CONFIG) + + def test_get_common_skyflow_credentials(self): + credentials = {"api_key": "dummy_api_key"} + self.vault_client.set_common_skyflow_credentials(credentials) + self.assertEqual(self.vault_client.get_common_skyflow_credentials(), credentials) + + def test_get_log_level(self): + log_level = "DEBUG" + self.vault_client.set_logger(log_level, MagicMock()) + self.assertEqual(self.vault_client.get_log_level(), log_level) + + def test_get_logger(self): + mock_logger = MagicMock() + self.vault_client.set_logger("INFO", mock_logger) + self.assertEqual(self.vault_client.get_logger(), mock_logger) \ No newline at end of file diff --git a/tests/vault/controller/__init__.py b/tests/vault/controller/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py new file mode 100644 index 00000000..0bd3d293 --- /dev/null +++ b/tests/vault/controller/test__connection.py @@ -0,0 +1,104 @@ +import unittest +from unittest.mock import Mock, patch + +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.enums import RequestMethod +from skyflow.vault.connection import InvokeConnectionRequest +from skyflow.vault.controller import Connection + +VALID_BEARER_TOKEN = "test_bearer_token" +VAULT_CONFIG = { + "credentials": {"api_key": "test_api_key"}, + "connection_url": "https://CONNECTION_URL" +} +SUCCESS_STATUS_CODE = 200 +SUCCESS_RESPONSE_CONTENT = '{"response": "success"}' +VALID_BODY = {"key": "value"} +VALID_PATH_PARAMS = {"path_key": "value"} +VALID_HEADERS = {"Content-Type": "application/json"} +VALID_QUERY_PARAMS = {"query_key": "value"} +INVALID_HEADERS = "invalid_headers" +INVALID_BODY = "invalid_body" +FAILURE_STATUS_CODE = 400 +ERROR_RESPONSE_CONTENT = '{"error": {"message": "error occurred"}}' + +class TestConnection(unittest.TestCase): + def 
setUp(self): + self.mock_vault_client = Mock() + self.mock_vault_client.get_config.return_value = VAULT_CONFIG + self.mock_vault_client.get_bearer_token.return_value = VALID_BEARER_TOKEN + self.connection = Connection(self.mock_vault_client) + + @patch('requests.Session.send') + def test_invoke_success(self, mock_send): + # Mocking successful response + mock_response = Mock() + mock_response.status_code = SUCCESS_STATUS_CODE + mock_response.content = SUCCESS_RESPONSE_CONTENT + mock_response.headers = {'x-request-id': 'test-request-id'} + mock_send.return_value = mock_response + + request = InvokeConnectionRequest( + method=RequestMethod.POST, + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + # Test invoke method + response = self.connection.invoke(request) + + # Assertions for successful invocation + self.assertEqual(response.response, {"response": "success", "request_id": "test-request-id"}) + self.mock_vault_client.get_bearer_token.assert_called_once() + + @patch('requests.Session.send') + def test_invoke_invalid_headers(self, mock_send): + request = InvokeConnectionRequest( + method="POST", + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=INVALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value) + + @patch('requests.Session.send') + def test_invoke_invalid_body(self, mock_send): + request = InvokeConnectionRequest( + method="POST", + body=INVALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_BODY.value) + + @patch('requests.Session.send') + def test_invoke_request_error(self, mock_send): + mock_response = Mock() + mock_response.status_code = FAILURE_STATUS_CODE + mock_response.content = ERROR_RESPONSE_CONTENT + mock_send.return_value = mock_response + + request = InvokeConnectionRequest( + method=RequestMethod.POST, + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) + + diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py new file mode 100644 index 00000000..0d2ea3d8 --- /dev/null +++ b/tests/vault/controller/test__vault.py @@ -0,0 +1,558 @@ +import unittest +from unittest.mock import Mock, patch +from skyflow.generated.rest import RecordServiceBatchOperationBody, V1BatchRecord, RecordServiceInsertRecordBody, \ + V1FieldRecords, RecordServiceUpdateRecordBody, RecordServiceBulkDeleteRecordBody, QueryServiceExecuteQueryBody, \ + V1DetokenizeRecordRequest, V1DetokenizePayload, V1TokenizePayload, V1TokenizeRecordRequest, RedactionEnumREDACTION +from skyflow.utils.enums import RedactionType, TokenMode +from skyflow.vault.controller import Vault +from skyflow.vault.data import InsertRequest, InsertResponse, UpdateResponse, UpdateRequest, DeleteResponse, \ + DeleteRequest, GetRequest, GetResponse, QueryRequest, QueryResponse +from skyflow.vault.tokens import DetokenizeRequest, DetokenizeResponse, TokenizeResponse, 
TokenizeRequest + +VAULT_ID = "test_vault_id" +TABLE_NAME = "test_table" + +class TestVault(unittest.TestCase): + + def setUp(self): + # Mock vault client + self.vault_client = Mock() + self.vault_client.get_vault_id.return_value = VAULT_ID + self.vault_client.get_logger.return_value = Mock() + + # Create a Vault instance with the mock client + self.vault = Vault(self.vault_client) + + @patch("skyflow.vault.controller._vault.validate_insert_request") + @patch("skyflow.vault.controller._vault.parse_insert_response") + def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate): + """Test insert functionality when continue_on_error is True.""" + + # Mock request + request = InsertRequest( + table_name=TABLE_NAME, + values=[{"field": "value"}], + tokens=None, + return_tokens=True, + upsert='column_name', + continue_on_error=True + ) + + expected_body = RecordServiceBatchOperationBody( + records=[ + V1BatchRecord( + fields={"field": "value"}, + table_name=TABLE_NAME, + method="POST", + tokenization=True, + upsert="column_name" + ) + ], + continue_on_error=True, + byot="DISABLE" + ) + + # Mock API response to contain a mix of successful and failed insertions + mock_api_response = Mock() + mock_api_response.responses = [ + {"Status": 200, "Body": {"records": [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]}}, + {"Status": 400, "Body": {"error": "Insert error for record 2"}} + ] + + # Expected parsed response + expected_inserted_fields = [ + {'skyflow_id': 'id1', 'request_index': 0, 'token_field': 'token_val1'} + ] + expected_errors = [ + {'request_index': 1, 'error': 'Insert error for record 2'} + ] + expected_response = InsertResponse(inserted_fields=expected_inserted_fields, errors=expected_errors) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_batch_operation.return_value = mock_api_response + + # Call the insert function + result = self.vault.insert(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_batch_operation.assert_called_once_with(VAULT_ID, expected_body) + mock_parse_response.assert_called_once_with(mock_api_response, True) + + # Assert that the result matches the expected InsertResponse + self.assertEqual(result.inserted_fields, expected_inserted_fields) + self.assertEqual(result.errors, expected_errors) + + @patch("skyflow.vault.controller._vault.validate_insert_request") + @patch("skyflow.vault.controller._vault.parse_insert_response") + def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_validate): + """Test insert functionality when continue_on_error is False, ensuring a single bulk insert.""" + + # Mock request with continue_on_error set to False + request = InsertRequest( + table_name=TABLE_NAME, + values=[{"field": "value"}], + tokens=None, + return_tokens=True, + upsert=None, + homogeneous=True, + continue_on_error=False + ) + + # Expected API request body based on InsertRequest parameters + expected_body = RecordServiceInsertRecordBody( + records=[ + V1FieldRecords(fields={"field": "value"}) + ], + tokenization=True, + upsert=None, + homogeneous=True + ) + + # Mock API response for a successful insert + mock_api_response = Mock() + mock_api_response.records = [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}] + + # Expected parsed response + expected_inserted_fields = 
[{'skyflow_id': 'id1', 'token_field': 'token_val1'}] + expected_response = InsertResponse(inserted_fields=expected_inserted_fields) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_insert_record.return_value = mock_api_response + + # Call the insert function + result = self.vault.insert(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_insert_record.assert_called_once_with(VAULT_ID, TABLE_NAME, + expected_body) + mock_parse_response.assert_called_once_with(mock_api_response, False) + + # Assert that the result matches the expected InsertResponse + self.assertEqual(result.inserted_fields, expected_inserted_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_insert_request") + @patch("skyflow.vault.controller._vault.parse_insert_response") + def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock_parse_response, mock_validate): + """Test insert functionality when continue_on_error is False, ensuring a single bulk insert.""" + + # Mock request with continue_on_error set to False + request = InsertRequest( + table_name=TABLE_NAME, + values=[{"field": "value"}], + tokens=[{"token_field": "token_val1"}], + return_tokens=True, + upsert=None, + homogeneous=True, + continue_on_error=False + ) + + # Expected API request body based on InsertRequest parameters + expected_body = RecordServiceInsertRecordBody( + records=[ + V1FieldRecords(fields={"field": "value"}, tokens={"token_field": "token_val1"}) + ], + tokenization=True, + upsert=None, + homogeneous=True + ) + + # Mock API response for a successful insert + mock_api_response = Mock() + mock_api_response.records = [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}] + + # Expected parsed response + expected_inserted_fields = [{'skyflow_id': 'id1', 'token_field': 'token_val1'}] + expected_response = InsertResponse(inserted_fields=expected_inserted_fields) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_insert_record.return_value = mock_api_response + + # Call the insert function + result = self.vault.insert(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_insert_record.assert_called_once_with(VAULT_ID, TABLE_NAME, + expected_body) + mock_parse_response.assert_called_once_with(mock_api_response, False) + + # Assert that the result matches the expected InsertResponse + self.assertEqual(result.inserted_fields, expected_inserted_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_update_request") + @patch("skyflow.vault.controller._vault.parse_update_record_response") + def test_update_successful(self, mock_parse_response, mock_validate): + """Test update functionality for a successful update request.""" + + # Mock request + request = UpdateRequest( + table=TABLE_NAME, + data={"skyflow_id": "12345", "field": "new_value"}, + tokens=None, + return_tokens=True, + token_mode=TokenMode.DISABLE + ) + + # Expected payload + expected_payload = RecordServiceUpdateRecordBody( + record=V1FieldRecords( + fields={"field": 
"new_value"}, + tokens=request.tokens + ), + tokenization=request.return_tokens, + byot=request.token_mode.value + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.skyflow_id = "12345" + mock_api_response.tokens = {"token_field": "token_value"} + + # Expected parsed response + expected_updated_field = {'skyflow_id': "12345", 'token_field': "token_value"} + expected_response = UpdateResponse(updated_field=expected_updated_field) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_update_record.return_value = mock_api_response + + # Call the update function + result = self.vault.update(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_update_record.assert_called_once_with( + VAULT_ID, + request.table, + request.data["skyflow_id"], + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected UpdateResponse + self.assertEqual(result.updated_field, expected_updated_field) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_delete_request") + @patch("skyflow.vault.controller._vault.parse_delete_response") + def test_delete_successful(self, mock_parse_response, mock_validate): + """Test delete functionality for a successful delete request.""" + + # Mock request + request = DeleteRequest( + table=TABLE_NAME, + ids=["12345", "67890"] + ) + + # Expected payload + expected_payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) + + # Mock API response + mock_api_response = Mock() + mock_api_response.record_id_response = ["12345", "67890"] + + # Expected parsed response + expected_deleted_ids = ["12345", "67890"] + expected_response = DeleteResponse(deleted_ids=expected_deleted_ids, errors=[]) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_delete_record.return_value = mock_api_response + + # Call the delete function + result = self.vault.delete(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_bulk_delete_record.assert_called_once_with( + VAULT_ID, + request.table, + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected DeleteResponse + self.assertEqual(result.deleted_ids, expected_deleted_ids) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_get_request") + @patch("skyflow.vault.controller._vault.parse_get_response") + def test_get_successful(self, mock_parse_response, mock_validate): + """Test get functionality for a successful get request.""" + + # Mock request + request = GetRequest( + table=TABLE_NAME, + ids=["12345", "67890"], + redaction_type=RedactionType.PLAIN_TEXT, + return_tokens=True, + fields=["field1", "field2"], + offset="0", + limit="10", + download_url=True, + column_values=None + ) + + # Expected payload + expected_payload = { + "object_name": request.table, + "skyflow_ids": request.ids, + "redaction": request.redaction_type.value, + "tokenization": request.return_tokens, + "fields": request.fields, + 
"offset": request.offset, + "limit": request.limit, + "download_url": request.download_url, + "column_name": request.column_name, + "column_values": request.column_values + } + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + # Expected parsed response + expected_data = [ + {"field1": "value1", "field2": "value2"}, + {"field1": "value3", "field2": "value4"} + ] + expected_response = GetResponse(data=expected_data, errors=[]) + + # Set the return value for parse_get_response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_get_record.return_value = mock_api_response + + # Call the get function + result = self.vault.get(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_bulk_get_record.assert_called_once_with( + VAULT_ID, + **expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected GetResponse + self.assertEqual(result.data, expected_data) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_get_request") + @patch("skyflow.vault.controller._vault.parse_get_response") + def test_get_successful_with_column_values(self, mock_parse_response, mock_validate): + """Test get functionality for a successful get request.""" + + # Mock request + request = GetRequest( + table=TABLE_NAME, + redaction_type=RedactionType.PLAIN_TEXT, + column_values=['customer+15@gmail.com'], + column_name='email' + ) + + # Expected payload + expected_payload = { + "object_name": request.table, + "tokenization": request.return_tokens, + "column_name": request.column_name, + "column_values": request.column_values + } + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + # Expected parsed response + expected_data = [ + {"field1": "value1", "field2": "value2"}, + {"field1": "value3", "field2": "value4"} + ] + expected_response = GetResponse(data=expected_data, errors=[]) + + # Set the return value for parse_get_response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_get_record.return_value = mock_api_response + + # Call the get function + result = self.vault.get(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_bulk_get_record.assert_called_once() + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected GetResponse + self.assertEqual(result.data, expected_data) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_query_request") + @patch("skyflow.vault.controller._vault.parse_query_response") + def test_query_successful(self, mock_parse_response, mock_validate): + """Test query functionality for a successful query request.""" + + # Mock request + request = QueryRequest(query="SELECT * FROM test_table") + + # Expected payload as a QueryServiceExecuteQueryBody instance + expected_payload = 
QueryServiceExecuteQueryBody(query=request.query) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + # Expected parsed response + expected_fields = [ + {"field1": "value1", "field2": "value2", "tokenized_data": {}}, + {"field1": "value3", "field2": "value4", "tokenized_data": {}} + ] + expected_response = QueryResponse() + expected_response.fields = expected_fields + + # Set the return value for parse_query_response + mock_parse_response.return_value = expected_response + query_api = self.vault_client.get_query_api.return_value + query_api.query_service_execute_query.return_value = mock_api_response + + # Call the query function + result = self.vault.query(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + query_api.query_service_execute_query.assert_called_once_with( + VAULT_ID, + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected QueryResponse + self.assertEqual(result.fields, expected_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_detokenize_request") + @patch("skyflow.vault.controller._vault.parse_detokenize_response") + def test_detokenize_successful(self, mock_parse_response, mock_validate): + request = DetokenizeRequest( + tokens=["token1", "token2"], + redaction_type=RedactionType.PLAIN_TEXT, + continue_on_error=False + ) + + # Expected payload as a V1DetokenizePayload instance + tokens_list = [ + V1DetokenizeRecordRequest(token="token1", redaction=RedactionEnumREDACTION.PLAIN_TEXT), + V1DetokenizeRecordRequest(token="token2", redaction=RedactionEnumREDACTION.PLAIN_TEXT) + ] + expected_payload = V1DetokenizePayload( + detokenization_parameters=tokens_list, + continue_on_error=request.continue_on_error + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1", value="value1", value_type=Mock(value="STRING"), error=None), + Mock(token="token2", value="value2", value_type=Mock(value="STRING"), error=None) + ] + + # Expected parsed response + expected_fields = [ + {"token": "token1", "value": "value1", "type": "STRING"}, + {"token": "token2", "value": "value2", "type": "STRING"} + ] + expected_response = DetokenizeResponse(detokenized_fields=expected_fields, errors=[]) + + # Set the return value for parse_detokenize_response + mock_parse_response.return_value = expected_response + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_detokenize.return_value = mock_api_response + + # Call the detokenize function + result = self.vault.detokenize(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + tokens_api.record_service_detokenize.assert_called_once_with( + VAULT_ID, + detokenize_payload=expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected DetokenizeResponse + self.assertEqual(result.detokenized_fields, expected_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_tokenize_request") + @patch("skyflow.vault.controller._vault.parse_tokenize_response") + def test_tokenize_successful(self, mock_parse_response, mock_validate): + """Test 
tokenize functionality for a successful tokenize request.""" + + # Mock request with tokenization parameters + request = TokenizeRequest( + values=[ + {"value": "value1", "column_group": "group1"}, + {"value": "value2", "column_group": "group2"} + ] + ) + + # Expected payload as a V1TokenizePayload instance + records_list = [ + V1TokenizeRecordRequest(value="value1", column_group="group1"), + V1TokenizeRecordRequest(value="value2", column_group="group2") + ] + expected_payload = V1TokenizePayload(tokenization_parameters=records_list) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1"), + Mock(token="token2") + ] + + # Expected parsed response + expected_fields = [ + {"token": "token1"}, + {"token": "token2"} + ] + expected_response = TokenizeResponse(tokenized_fields=expected_fields) + + # Set the return value for parse_tokenize_response + mock_parse_response.return_value = expected_response + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_tokenize.return_value = mock_api_response + + # Call the tokenize function + result = self.vault.tokenize(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + tokens_api.record_service_tokenize.assert_called_once_with( + VAULT_ID, + tokenize_payload=expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected TokenizeResponse + self.assertEqual(result.tokenized_fields, expected_fields) \ No newline at end of file diff --git a/tests/vault/test_client_init.py b/tests/vault/test_client_init.py deleted file mode 100644 index 3cd670f9..00000000 --- a/tests/vault/test_client_init.py +++ /dev/null @@ -1,42 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import logging -import unittest - - -from skyflow.vault._config import * -from skyflow.vault._client import Client -from skyflow.errors._skyflow_errors import * -from skyflow import set_log_level, LogLevel - - -class TestConfig(unittest.TestCase): - - def testClientInitInvalidVaultURL(self): - config = Configuration('VAULT ID', 22, lambda: 'token') - - try: - client = Client(config) - self.fail('Should fail due to invalid VAULT URL') - except SkyflowError as e: - self.assertEqual(SkyflowErrorCodes.INVALID_INPUT.value, e.code) - self.assertEqual( - SkyflowErrorMessages.VAULT_URL_INVALID_TYPE.value % (type(22)), e.message) - - def testClientInitInvalidTokenProvider(self): - config = Configuration('VAULT ID', 'VAULT URL', 'token') - - try: - client = Client(config) - self.fail('Should fail due to invalid TOKEN PROVIDER') - except SkyflowError as e: - self.assertEqual(SkyflowErrorCodes.INVALID_INPUT.value, e.code) - self.assertEqual(SkyflowErrorMessages.TOKEN_PROVIDER_ERROR.value % ( - type('token')), e.message) - - def testLogLevel(self): - skyflowLogger = logging.getLogger('skyflow') - self.assertEqual(skyflowLogger.getEffectiveLevel(), logging.ERROR) - set_log_level(logLevel=LogLevel.DEBUG) - self.assertEqual(skyflowLogger.level, logging.DEBUG) diff --git a/tests/vault/test_config.py b/tests/vault/test_config.py deleted file mode 100644 index 7c4f5ced..00000000 --- a/tests/vault/test_config.py +++ /dev/null @@ -1,77 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from http import client -import unittest - -from skyflow.vault._config import * -from skyflow.vault import Client -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages - - -class TestConfig(unittest.TestCase): - def testInsertOptions(self): - defaultOptions = InsertOptions() - noTokensOption = InsertOptions(tokens=False) - - self.assertEqual(defaultOptions.tokens, True) - self.assertEqual(noTokensOption.tokens, False) - - def testSkyflowConfig(self): - myconfig = Configuration( - "vaultID", "https://vaults.skyflow.com", lambda: "token") - self.assertEqual(myconfig.vaultID, "vaultID") - self.assertEqual(myconfig.vaultURL, "https://vaults.skyflow.com") - self.assertEqual(myconfig.tokenProvider(), "token") - - def testConnectionConfigDefaults(self): - config = ConnectionConfig( - 'https://skyflow.com', methodName=RequestMethod.GET) - self.assertEqual(config.connectionURL, 'https://skyflow.com') - self.assertEqual(config.methodName, RequestMethod.GET) - self.assertDictEqual(config.pathParams, {}) - self.assertDictEqual(config.queryParams, {}) - self.assertDictEqual(config.requestHeader, {}) - self.assertDictEqual(config.requestBody, {}) - - def testConfigArgs(self): - configOnlyTokenProvider = Configuration(lambda: "token") - self.assertIsNotNone(configOnlyTokenProvider.tokenProvider) - self.assertEqual(configOnlyTokenProvider.vaultID, '') - self.assertEqual(configOnlyTokenProvider.vaultURL, '') - - try: - Configuration() - except TypeError as e: - self.assertEqual(e.args[0], "tokenProvider must be given") - - def testConfigInvalidIdType(self): - try: - config = Configuration( - ['invalid'], 'www.example.org', lambda: 'token') - Client(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.VAULT_ID_INVALID_TYPE.value % type(['invalid'])) - - def testCheckConfigEmptyVaultId(self): - try: - config = Configuration('', '', lambda: 'token') - Client(config)._checkConfig('test') - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_VAULT_ID.value) - - def testCheckConfigEmptyVaultURL(self): - try: - config = Configuration('vault_id', '', lambda: 'token') - Client(config)._checkConfig('test') - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_VAULT_URL.value) diff --git a/tests/vault/test_delete.py b/tests/vault/test_delete.py deleted file mode 100644 index fd6e7c3d..00000000 --- a/tests/vault/test_delete.py +++ /dev/null @@ -1,235 +0,0 @@ -import json -import unittest -import os - -import asyncio -import warnings -from unittest import mock -from unittest.mock import patch, MagicMock - -import requests -from requests import HTTPError -from requests.models import Response -from dotenv import dotenv_values - -from skyflow.errors import SkyflowError, SkyflowErrorCodes -from skyflow.errors._skyflow_errors import SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, DeleteOptions -from skyflow.vault._delete import deleteProcessResponse - - -class TestDelete(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 
'tests/vault/data/') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - "12345", "demo", tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - - self.record_id = "123" - - self.mockResponse = { - "responses": [ - { - "records": [ - { - "skyflow_id": self.record_id, - "deleted": True - } - ] - } - ] - } - self.DeleteOptions = DeleteOptions(tokens=False) - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testDeleteInvalidRecordsType(self): - invalidData = {"records": "invalid"} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testDeleteMissingdata(self): - invalid_data = {} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDeleteEmptyRecords(self): - invalid_data = {"records": []} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.EMPTY_RECORDS_IN_DELETE.value) - - def testDeleteMissingRecordsKey(self): - invalid_data = {"some_other_key": "value"} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDeleteNoIds(self): - invalidData = {"records": [{"invalid": "invalid", "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - def testDeleteInvalidIdType(self): - invalidData = {"records": [{"id": ["invalid"], "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (list)) - - def testDeleteInvalidIdType2(self): - invalidData = {"records": [{"id": 123, "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testDeleteEmptyId(self): - invalidData = {"records": [{"id": "", "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.EMPTY_ID_IN_DELETE.value) - - def testDeleteNoTable(self): - invalidData 
= {"records": [{"id": "id1", "invalid": "invalid"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testDeleteInvalidTableType(self): - invalidData = {"records": [{"id": "id1", "table": ["invalid"]}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testDeleteEmptyTable(self): - invalidData = {"records": [{"id": "123", "table": ""}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.EMPTY_TABLE_IN_DELETE.value) - - def testDeleteProcessResponseWithSuccessfulResponse(self): - mock_response = requests.Response() - mock_response.status_code = 200 - mock_response._content = b'{"key": "value"}' - partial, result = deleteProcessResponse(mock_response) - self.assertFalse(partial) - self.assertIsInstance(result, dict) - self.assertEqual(result, {"key": "value"}) - - def testDeleteProcessResponseWithNoContentResponse(self): - mock_response = requests.Response() - mock_response.status_code = 204 - result = deleteProcessResponse(mock_response) - self.assertIsNone(result) - - def test_http_error_with_error_message(self): - error_response = { - 'code': 400, - 'description': 'Error occurred' - } - response = mock.Mock(spec=requests.Response, status_code=400, - content=json.dumps(error_response).encode()) - partial, error = deleteProcessResponse(response) - self.assertFalse(partial) - self.assertEqual(error, { - "code": 400, - "description": "Error occurred", - }) - - def test_delete_data_with_errors(self): - response = mock.Mock(spec=requests.Response) - response.status_code = 404 - response.content = b'{"code": 404, "description": "Not found"}' - with mock.patch('requests.delete', return_value=response): - records = {"records": [ - {"id": "id1", "table": "stripe"}, - ]} - result = self.client.delete(records) - - self.assertIn('errors', result) - error = result['errors'][0] - self.assertEqual(error['id'], "id1") - self.assertEqual(error['error'], {'code': 404, 'description': 'Not found'}) - - def testDeleteProcessInvalidResponse(self): - response = Response() - response.status_code = 500 - response._content = b"Invalid Request" - try: - deleteProcessResponse(response) - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % - response.content.decode('utf-8')) - - def test_delete_process_response_with_error(self): - mock_response = mock.Mock(spec=requests.Response) - mock_response.status_code = 404 - mock_response.content = b'{"error": {"message": "Not found"}}' - mock_response.headers = {'x-request-id': 'request-id-123'} - partial, error = deleteProcessResponse(mock_response) - self.assertFalse(partial) - self.assertEqual(error, {"error": {"message": "Not found"}}) - - def test_delete_process_response_response_not_json(self): - mock_response = mock.Mock(spec=requests.Response) - mock_response.status_code = 500 - mock_response.content = b'Not a valid JSON response' - - with self.assertRaises(SkyflowError) as cm: - 
deleteProcessResponse(mock_response) - - exception = cm.exception - self.assertEqual(exception.code, 500) - self.assertIn("Not a valid JSON response", str(exception)) diff --git a/tests/vault/test_detokenize.py b/tests/vault/test_detokenize.py deleted file mode 100644 index 28ee8bed..00000000 --- a/tests/vault/test_detokenize.py +++ /dev/null @@ -1,265 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import unittest -import os -from skyflow.vault._detokenize import getDetokenizeRequestBody, createDetokenizeResponseBody, getBulkDetokenizeRequestBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault._client import Client, Configuration -from skyflow.service_account import generate_bearer_token -from skyflow.vault._config import DetokenizeOptions, RedactionType -from dotenv import dotenv_values -import warnings - -import json -import asyncio - - -class TestDetokenize(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.testToken = self.envValues["DETOKENIZE_TEST_TOKEN"] - self.tokenField = { - "token": self.envValues["DETOKENIZE_TEST_TOKEN"] - } - self.data = {"records": [self.tokenField]} - self.mocked_futures = [] - self.event_loop = asyncio.new_event_loop() - - def tokenProvider(): - token, _ = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result((json.dumps(response).encode(), statusCode)) - else: - future.set_result((response, statusCode)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetDetokenizeRequestBodyWithValidBody(self): - body = getDetokenizeRequestBody(self.tokenField) - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "PLAIN_TEXT" - }] - } - - self.assertEqual(body, expectedOutput) - - def testDetokenizeNoRecords(self): - invalidData = {"invalidKey": self.tokenField} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDetokenizeRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testDetokenizeNoToken(self): - invalidData = {"records": [{"invalid": "invalid"}]} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_KEY_ERROR.value) - - def testDetokenizeTokenInvalidType(self): - invalidData = {"records": [{"token": ["invalid"]}]} - try: - 
self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TOKEN_TYPE.value % (list)) - - def testDetokenizeRedactionInvalidType(self): - invalidData = {"records": [{"token": "valid", "redaction": 'demo'}]} - try: - self.client.detokenize(invalidData) - except SkyflowError as error: - self.assertTrue(error) - self.assertEqual(error.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(error.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % str(type("demo"))) - - def testResponseBodySuccess(self): - response = {"records": [{"token": "abc", "value": "secret"}]} - self.add_mock_response(response, 200) - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - self.assertEqual(partial, False) - self.assertIn("records", res) - self.assertNotIn("errors", res) - self.assertEqual(len(res["records"]), 1) - self.assertEqual(res, {"records": response["records"]}) - - def testResponseBodyPartialSuccess(self): - success_response = {"records": [{"token": "abc", "value": "secret"}]} - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(success_response, 200) - self.add_mock_response(error_response, 404) - - detokenizeRecords = {"records": [self.tokenField, self.tokenField]} - - res, partial = createDetokenizeResponseBody(detokenizeRecords, self.mocked_futures, DetokenizeOptions()) - self.assertTrue(partial) - - records = res["records"] - self.assertIsNotNone(records) - self.assertEqual(len(records), 1) - self.assertEqual(records, success_response["records"]) - - errors = res["errors"] - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseBodyFailure(self): - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(error_response, 404) - - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - self.assertFalse(partial) - - self.assertNotIn("records", res) - errors = res["errors"] - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseBodySuccessWithContinueOnErrorAsFalse(self): - response = { - "records": [ - {"token": "abc", "value": "secret1"}, - {"token": "def", "value": "secret2"} - ] - } - self.add_mock_response(response, 200) - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions(False)) - self.assertEqual(partial, False) - self.assertIn("records", res) - self.assertNotIn("errors", res) - self.assertEqual(len(res["records"]), 2) - self.assertEqual(res, {"records": response["records"]}) - - def testResponseBodyFailureWithContinueOnErrorAsFalse(self): - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(error_response, 404) - - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions(False)) - self.assertFalse(partial) - - self.assertNotIn("records", res) - errors = res["errors"] - 
self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], error_response["error"]["http_code"]) - self.assertEqual(errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseNotJson(self): - response = "not a valid json".encode() - self.add_mock_response(response, 200, encode=False) - try: - createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % - response.decode('utf-8')) - - def testRequestBodyNoRedactionKey(self): - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "PLAIN_TEXT" - }] - } - requestBody = getDetokenizeRequestBody(self.tokenField) - self.assertEqual(requestBody, expectedOutput) - - def testRequestBodyWithValidRedaction(self): - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "REDACTED" - }] - } - data = { - "token": self.testToken, - "redaction": RedactionType.REDACTED - } - requestBody = getDetokenizeRequestBody(data) - self.assertEqual(expectedOutput, requestBody) - - def testRequestBodyWithInValidRedaction(self): - data = { - "token": self.testToken, - "redaction": "123" - } - try: - getDetokenizeRequestBody(data) - except SkyflowError as error: - self.assertTrue(error) - self.assertEqual(error.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(error.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % str(type(data["redaction"]))) - - def testGetBulkDetokenizeRequestBody(self): - expectedOutput = { - "detokenizationParameters": [ - { - "token": self.testToken, - "redaction": "REDACTED" - }, - { - "token": self.testToken, - "redaction": "REDACTED" - }, - ] - } - data = { - "token": self.testToken, - "redaction": RedactionType.REDACTED - } - try: - requestBody = getBulkDetokenizeRequestBody([data, data]) - self.assertIn("detokenizationParameters", requestBody) - self.assertEqual(len(requestBody["detokenizationParameters"]), 2) - self.assertEqual(expectedOutput, requestBody) - except SkyflowError as e: - self.fail('Should not have thrown an error') - \ No newline at end of file diff --git a/tests/vault/test_get.py b/tests/vault/test_get.py deleted file mode 100644 index bd98efc1..00000000 --- a/tests/vault/test_get.py +++ /dev/null @@ -1,259 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os - -import warnings -import asyncio -import json -from dotenv import dotenv_values -from skyflow.service_account import generate_bearer_token -from skyflow.vault import Client, Configuration, RedactionType, GetOptions -from skyflow.vault._get import getGetRequestBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages - -class TestGet(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/test_get.py') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, table, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result( - (json.dumps(response).encode(), statusCode, table)) - else: - future.set_result((response, statusCode, table)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetByIdNoRecords(self): - invalidData = {"invalidKey": "invalid"} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetByIdRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testGetByIdNoIds(self): - invalidData = {"records": [ - {"invalid": "invalid", "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidIdsType(self): - invalidData = {"records": [ - {"ids": "invalid", "table": "newstripe", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_IDS_TYPE.value % (str)) - - def testGetByIdInvalidIdsType2(self): - invalidData = {"records": [ - {"ids": ["123", 123], "table": "newstripe", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testGetByIdNoTable(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "invalid": "invalid", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - 
except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetByIdInvalidTableType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": ["invalid"], "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testGetByIdNoColumnName(self): - invalidData = {"records": [ - {"table": "newstripe", "redaction": RedactionType.PLAIN_TEXT}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidColumnName(self): - invalidData = {"records": [ - {"ids": ["123", "456"], "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, - "columnName": ["invalid"]}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_COLUMN_NAME.value % (list)) - - def testGetByIdNoColumnValues(self): - invalidData = {"records": [ - {"table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, "columnName": "card_number"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidColumnValues(self): - invalidData = {"records": [ - {"ids": ["123", "456"], "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, - "columnName": "card_number", "columnValues": "invalid"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_COLUMN_VALUE.value % (str) ) - - def testGetByTokenAndRedaction(self): - invalidData = {"records": [ - {"ids": ["123","456"], - "table": "stripe", "redaction": RedactionType.PLAIN_TEXT,}]} - options = GetOptions(True) - try: - self.client.get(invalidData,options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.REDACTION_WITH_TOKENS_NOT_SUPPORTED.value) - - def testGetByNoOptionAndRedaction(self): - invalidData = {"records":[{"ids":["123", "456"], "table":"newstripe"}]} - options = GetOptions(False) - try: - self.client.get(invalidData,options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code,SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message,SkyflowErrorMessages.REDACTION_KEY_ERROR.value) - - def testGetByOptionAndUniqueColumnRedaction(self): - invalidData ={ - "records":[{ - "table":"newstripe", - "columnName":"card_number", - "columnValues":["456","980"], - }] - } - options = GetOptions(True) - try: - self.client.get(invalidData, options=options) - self.fail('Should have 
thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.TOKENS_GET_COLUMN_NOT_SUPPORTED.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKENS_GET_COLUMN_NOT_SUPPORTED.value) - - def testInvalidRedactionTypeWithNoOption(self): - invalidData = { - "records": [{ - "ids": ["123","456"], - "table": "stripe", - "redaction": "invalid_redaction" - }] - } - options = GetOptions(False) - try: - self.client.get(invalidData, options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % (str)) - - def testBothSkyflowIdsAndColumnDetailsPassed(self): - invalidData = { - "records": [ - { - "ids": ["123", "456"], - "table": "stripe", - "redaction": RedactionType.PLAIN_TEXT, - "columnName": "email", - "columnValues": ["email1@gmail.com", "email2@gmail.co"] - } - ] - } - options = GetOptions(False) - try: - self.client.get(invalidData, options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED.value) - - def testGetRequestBodyReturnsRequestBodyWithIds(self): - validData = { - "records": [{ - "ids": ["123", "456"], - "table": "stripe", - }] - } - options = GetOptions(True) - try: - requestBody = getGetRequestBody(validData["records"][0], options) - self.assertTrue(requestBody["tokenization"]) - except SkyflowError as e: - self.fail('Should not have thrown an error') \ No newline at end of file diff --git a/tests/vault/test_get_by_id.py b/tests/vault/test_get_by_id.py deleted file mode 100644 index d9676237..00000000 --- a/tests/vault/test_get_by_id.py +++ /dev/null @@ -1,193 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os - -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault import Client, Configuration, RedactionType -from skyflow.vault._get_by_id import createGetResponseBody -from skyflow.service_account import generate_bearer_token -from dotenv import dotenv_values -import warnings -import asyncio -import json - - -class TestGetById(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, table, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result( - (json.dumps(response).encode(), statusCode, table)) - else: - future.set_result((response, statusCode, table)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetByIdNoRecords(self): - invalidData = {"invalidKey": "invalid"} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetByIdRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testGetByIdNoIds(self): - invalidData = {"records": [ - {"invalid": "invalid", "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - def testGetByIdInvalidIdsType(self): - invalidData = {"records": [ - {"ids": "invalid", "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_IDS_TYPE.value % (str)) - - def testGetByIdInvalidIdsType2(self): - invalidData = {"records": [ - {"ids": ["123", 123], "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testGetByIdNoTable(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "invalid": "invalid", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') 
- except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetByIdInvalidTableType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": ["invalid"], "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testGetByIdNoRedaction(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": "pii_fields", "invalid": "invalid"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.REDACTION_KEY_ERROR.value) - - def testGetByIdInvalidRedactionType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % (str)) - - def testCreateResponseBodySuccess(self): - response = {"records": [ - {"fields": {"card_number": "4111-1111-1111-1111"}}]} - self.add_mock_response(response, 200, "table") - result, partial = createGetResponseBody(self.mocked_futures) - - self.assertFalse(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["fields"], - response["records"][0]["fields"]) - self.assertEqual(result["records"][0]["table"], "table") - - def testCreateResponseBodyPartialSuccess(self): - success_response = {"records": [ - {"fields": {"card_number": "4111-1111-1111-1111"}}]} - self.add_mock_response(success_response, 200, "table") - - failed_response = {"error": { - "http_code": 404, - "message": "Not Found" - }} - self.add_mock_response(failed_response, 404, "ok") - - result, partial = createGetResponseBody(self.mocked_futures) - - self.assertTrue(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["fields"], - success_response["records"][0]["fields"]) - self.assertEqual(result["records"][0]["table"], "table") - - self.assertTrue(len(result["errors"]), 1) - self.assertEqual(result["errors"][0]['error']['code'], - failed_response["error"]['http_code']) - self.assertEqual(result["errors"][0]['error']['description'], - failed_response["error"]['message']) - - def testCreateResponseBodyInvalidJson(self): - response = "invalid json" - self.add_mock_response(response.encode(), 200, 'table', encode=False) - - try: - createGetResponseBody(self.mocked_futures) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % response) \ No newline at end of file diff --git a/tests/vault/test_insert.py b/tests/vault/test_insert.py deleted file mode 100644 index c39e8e3c..00000000 --- a/tests/vault/test_insert.py +++ /dev/null @@ -1,649 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json -import unittest -import os -from requests.models import Response -from dotenv import dotenv_values -from skyflow.vault._insert import getInsertRequestBody, processResponse, convertResponse, getUpsertColumn, validateUpsertOptions -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, InsertOptions, UpsertOption, BYOT - - -class TestInsert(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - record = { - "table": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens":{ - "cardNumber": "4111-1111-1111-1111", - } - } - self.data = {"records": [record]} - self.mockRequest = {"records": [record]} - record2 = { - "table": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - } - } - self.data2 = {"records": [record2]} - self.mockRequest2 = {"records": [record2]} - - self.mockResponse = { - "responses": [ - { - "records": [ - { - "skyflow_id": 123, - "tokens": { - "first_name": "4db12c22-758e-4fc9-b41d-e8e48b876776", - "cardNumber": "card_number_token", - "cvv": "cvv_token", - "expiry_date": "6b45daa3-0e81-42a8-a911-23929f1cf9da" - - } - } - ], - } - ], - "requestId": "2g3fd14-z9bs-xnvn4k6-vn1s-e28w35" - } - - self.mockResponseCOESuccessObject = { - "Body": { - "records": self.mockResponse['responses'][0]['records'] - }, - "Status": 200 - } - - self.mockResponseCOEErrorObject = { - "Body": { - "error": "Error Inserting Records due to unique constraint violation" - }, - "Status": 400 - } - - self.mockResponseCOESuccess = { - "responses": [self.mockResponseCOESuccessObject], - "requestId": self.mockResponse['requestId'] - } - - self.mockResponseCOEPartialSuccess = { - "responses": [ - self.mockResponseCOESuccessObject, - self.mockResponseCOEErrorObject - ], - "requestId": self.mockResponse['requestId'] - } - - self.mockResponseCOEFailure = { - "responses": [self.mockResponseCOEErrorObject], - "requestId": self.mockResponse['requestId'] - } - - self.insertOptions = InsertOptions(tokens=True) - self.insertOptions2 = InsertOptions(tokens=True, byot=BYOT.ENABLE) - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetInsertRequestBodyWithValidBody(self): - body = json.loads(getInsertRequestBody(self.data, self.insertOptions2)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens":{ - "cardNumber": "4111-1111-1111-1111", - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidBodyWithoutTokens(self): - body = json.loads(getInsertRequestBody(self.data2, self.insertOptions)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidUpsertOptions(self): - body = json.loads(getInsertRequestBody(self.data, InsertOptions(True,[UpsertOption(table='pii_fields',column='column1')], byot=BYOT.ENABLE))) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - 
"cvv": "234" - }, - "tokens": { - "cardNumber": "4111-1111-1111-1111", - }, - "method": 'POST', - "quorum": True, - "tokenization": True, - "upsert": 'column1', - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidUpsertOptionsWithOutTokens(self): - body = json.loads(getInsertRequestBody(self.data2, InsertOptions(True,[UpsertOption(table='pii_fields',column='column1')]))) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "method": 'POST', - "quorum": True, - "tokenization": True, - "upsert": 'column1', - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyNoRecords(self): - invalidData = {"invalidKey": self.data["records"]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetInsertRequestBodyRecordsInvalidType(self): - invalidData = {"records": 'records'} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str(type('str')))) - - def testGetInsertRequestBodyNoFields(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - } - }, - { - "table": "table", - "invalid": {} - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FIELDS_KEY_ERROR.value) - - def testGetInsertRequestBodyInvalidFieldsType(self): - invalidData = {"records": [{ - "table": "table", - "fields": 'fields' - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % (str(type('str')))) - - def testInvalidTokensInRecord(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": "tokens" - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TOKENS_TYPE.value % (str(type('str')))) - - def testEmptyTokensInRecord(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": { - } - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_TOKENS_IN_INSERT.value) - - def testMismatchTokensInRecord(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": { - "cvv": "123" - } - } - ]} - try: - getInsertRequestBody(invalidData, 
self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS.value) - - # def testTokensInRecord(self): - # invalidData = {"records": [{ - # "table": "table", - # "fields": { - # "card_number": "4111-1111" - # }, - # "tokens": { - # "cvv": "123" - # } - # } - # ]} - # try: - # getInsertRequestBody(invalidData, self.insertOptions) - # self.fail('Should have thrown an error') - # except SkyflowError as e: - # self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - # self.assertEqual( - # e.message, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS.value) - - def testGetInsertRequestBodyWithTokensValidBody(self): - body = json.loads(getInsertRequestBody(self.data, self.insertOptions2)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens": { - "cardNumber": "4111-1111-1111-1111", - - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyNoTable(self): - invalidData = {"records": [{ - "noTable": "tableshouldbehere", - "fields": { - "card_number": "4111-1111" - } - }, - { - "table": "table", - "invalid": {} - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetInsertRequestBodyInvalidTableType(self): - invalidData = {"records": [{ - "table": {'invalidtype': 'thisisinvalid'}, - "fields": { - "card_number": "4111-1111" - } - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - str(type({'a': 'b'})))) - - def testGetInsertRequestBodyWithContinueOnErrorAsTrue(self): - try: - options = InsertOptions(tokens=True, continueOnError=True, byot=BYOT.ENABLE) - request = getInsertRequestBody(self.data, options) - self.assertIn('continueOnError', request) - request = json.loads(request) - self.assertEqual(request['continueOnError'], True) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testGetInsertRequestBodyWithContinueOnErrorAsFalse(self): - try: - options = InsertOptions(tokens=True, continueOnError=False, byot=BYOT.ENABLE) - request = getInsertRequestBody(self.data, options) - # assert 'continueOnError' in request - self.assertIn('continueOnError', request) - request = json.loads(request) - self.assertEqual(request['continueOnError'], False) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testGetInsertRequestBodyWithoutContinueOnError(self): - try: - request = getInsertRequestBody(self.data, self.insertOptions2) - # assert 'continueOnError' not in request - self.assertNotIn('continueOnError', request) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testInsertInvalidJson(self): - invalidjson = { - "records": [{ - "table": "table", - "fields": { - "invalid": json - } - }] - } - - try: - getInsertRequestBody(invalidjson, self.insertOptions) - self.fail('Should 
have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_JSON.value % ('insert payload')) - - def testProcessInvalidResponse(self): - response = Response() - response.status_code = 500 - response._content = b"Invalid Request" - try: - processResponse(response) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % - response.content.decode('utf-8')) - - def testProcessValidResponse(self): - response = Response() - response.status_code = 200 - response._content = b'{"key": "value"}' - try: - responseDict = processResponse(response) - self.assertDictEqual(responseDict, {'key': 'value'}) - except SkyflowError as e: - self.fail() - - def testClientInit(self): - config = Configuration( - 'vaultid', 'https://skyflow.com', lambda: 'test') - client = Client(config) - self.assertEqual(client.vaultURL, 'https://skyflow.com') - self.assertEqual(client.vaultID, 'vaultid') - self.assertEqual(client.tokenProvider(), 'test') - - def testProcessResponseInvalidJson(self): - invalid_response = Response() - invalid_response.status_code = 200 - invalid_response._content = b'invalid-json' - try: - processResponse(invalid_response) - self.fail('not failing on invalid json') - except SkyflowError as se: - self.assertEqual(se.code, 200) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'invalid-json') - - def testProcessResponseFail(self): - invalid_response = Response() - invalid_response.status_code = 404 - invalid_response._content = b"error" - try: - processResponse(invalid_response) - self.fail('Not failing on invalid error json') - except SkyflowError as se: - self.assertEqual(se.code, 404) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'error') - - def testConvertResponseNoTokens(self): - options = InsertOptions(tokens=False) - result, partial = convertResponse(self.mockRequest, self.mockResponse, options) - self.assertFalse(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["table"], "pii_fields") - self.assertEqual(result["records"][0]["request_index"], 0) - self.assertNotIn("tokens", result["records"][0]) - - def testConvertResponseWithTokens(self): - options = InsertOptions(tokens=True) - result, partial = convertResponse(self.mockRequest, self.mockResponse, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["request_index"], 0) - - self.assertEqual(result["records"][0]["fields"] - ["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"] - ["cvv"], "cvv_token") - - def testConvertResponseWithContinueoOnErrorSuccess(self): - options = InsertOptions(tokens=True, continueOnError=True) - result, partial = convertResponse(self.mockRequest, self.mockResponseCOESuccess, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("errors", result) - - self.assertNotIn("skyflow_id", result["records"][0]) - 
self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["fields"]["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"]["cvv"], "cvv_token") - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - def testConvertResponseWithContinueoOnErrorAndNoTokensSuccess(self): - options = InsertOptions(tokens=False, continueOnError=True) - result, partial = convertResponse(self.mockRequest, self.mockResponseCOESuccess, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("errors", result) - - self.assertIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["skyflow_id"], 123) - - self.assertIn("table", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertNotIn("fields", result["records"][0]) - self.assertNotIn("tokens", result["records"][0]) - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - def testConvertResponseWithContinueoOnErrorPartialSuccess(self): - options = InsertOptions(tokens=True, continueOnError=True) - partialSuccessRequest = { - "records": [ - self.mockRequest['records'][0], - self.mockRequest['records'][0], - ] - } - result, partial = convertResponse(partialSuccessRequest, self.mockResponseCOEPartialSuccess, options) - self.assertTrue(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertEqual(len(result["errors"]), 1) - - self.assertNotIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["fields"]["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"]["cvv"], "cvv_token") - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - message = self.mockResponseCOEErrorObject['Body']['error'] - message += ' - request id: ' + self.mockResponse['requestId'] - self.assertEqual(result["errors"][0]["error"]["code"], 400) - self.assertEqual(result["errors"][0]["error"]["description"], message) - - self.assertIn("request_index", result["errors"][0]["error"]) - self.assertEqual(result["errors"][0]["error"]["request_index"], 1) - - def testConvertResponseWithContinueoOnErrorFailure(self): - options = InsertOptions(tokens=True, continueOnError=True) - result, partial = convertResponse(self.mockRequest, self.mockResponseCOEFailure, options) - self.assertFalse(partial) - - self.assertEqual(len(result["errors"]), 1) - self.assertNotIn("records", result) - - message = self.mockResponseCOEErrorObject['Body']['error'] - message += ' - request id: ' + self.mockResponse['requestId'] - self.assertEqual(result["errors"][0]["error"]["code"], 400) - self.assertEqual(result["errors"][0]["error"]["description"], message) - self.assertIn("request_index", result["errors"][0]["error"]) - self.assertEqual(result["errors"][0]["error"]["request_index"], 0) - - def testInsertInvalidToken(self): - config = Configuration('id', 'url', lambda: 'invalid-token') - try: - Client(config).insert({'records': []}) - self.fail() - except SkyflowError as e: - 
self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) - - def testGetUpsertColumn(self): - testUpsertOptions = [UpsertOption(table='table1',column='column1'), - UpsertOption(table='table2',column='column2')] - upsertValid = getUpsertColumn('table1',upsertOptions=testUpsertOptions) - upsertInvalid = getUpsertColumn('table3',upsertOptions=testUpsertOptions) - self.assertEqual(upsertValid,'column1') - self.assertEqual(upsertInvalid,'') - - def testValidUpsertOptions(self): - testUpsertOptions = 'upsert_string' - try: - validateUpsertOptions(testUpsertOptions) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_OPTIONS_TYPE.value % type(testUpsertOptions) ) - try: - validateUpsertOptions(upsertOptions=[]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTIONS_LIST.value) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table=123,column='')]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_TABLE_TYPE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='',column='')]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_TABLE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='table1',column=1343)]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_COLUMN_TYPE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='table2',column='')]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_COLUMN.value % 0) - - def testTokensPassedWithByotModeDisable(self): - try: - options = InsertOptions(byot=BYOT.DISABLE) - getInsertRequestBody(self.data, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.TOKENS_PASSED_FOR_BYOT_DISABLE.value) - - def testTokensNotPassedWithByotModeEnable(self): - try: - getInsertRequestBody(self.data2, self.insertOptions2) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % "ENABLE") - - def testTokensNotPassedWithByotModeEnableStrict(self): - try: - options = InsertOptions(byot=BYOT.ENABLE_STRICT) - getInsertRequestBody(self.data2, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % "ENABLE_STRICT") - - def testTokensPassedWithByotModeEnableStrict(self): - try: - options = InsertOptions(byot=BYOT.ENABLE_STRICT) - getInsertRequestBody(self.data, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value) diff --git a/tests/vault/test_invoke_connection.py b/tests/vault/test_invoke_connection.py deleted file mode 100644 index 
15d562cd..00000000 --- a/tests/vault/test_invoke_connection.py +++ /dev/null @@ -1,148 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import unittest - -from requests import request -from skyflow.service_account._token import generate_bearer_token -from skyflow.vault._connection import * -from skyflow.vault._client import * -from skyflow.vault._config import * -from skyflow.errors._skyflow_errors import * -from dotenv import dotenv_values - - -class testInvokeConnection(unittest.TestCase): - def testCreateRequestDefault(self): - config = ConnectionConfig('https://skyflow.com/', RequestMethod.GET) - try: - req = createRequest(config) - body, url, method = req.body, req.url, req.method - self.assertEqual(url, 'https://skyflow.com/') - self.assertEqual(body, '{}') - self.assertEqual(method, RequestMethod.GET.value) - except SkyflowError: - self.fail() - - def testCreateRequestInvalidJSONBody(self): - invalidJsonBody = {'somekey': unittest} - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestBody=invalidJsonBody) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_REQUEST_BODY.value) - - def testCreateRequestInvalidBodyType(self): - nonDictBody = 'body' - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestBody=nonDictBody) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_REQUEST_BODY.value) - - def testCreateRequestBodyInvalidHeadersJson(self): - invalidJsonHeaders = {'somekey': unittest} - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestHeader=invalidJsonHeaders) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_HEADERS.value) - - def testCreateRequestBodyHeadersNotDict(self): - invalidJsonHeaders = 'invalidheaderstype' - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestHeader=invalidJsonHeaders) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_HEADERS.value) - - def testCreateRequestInvalidURL(self): - invalidUrl = 'https::///skyflow.com' - config = ConnectionConfig(invalidUrl, RequestMethod.GET) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_URL.value % (invalidUrl)) - - def testPathParams(self): - try: - url = parsePathParams(url='https://skyflow.com/{name}/{department}/content/{action}', - pathParams={'name': 'john', 'department': 'test', 'action': 'download'}) - - expectedURL = 'https://skyflow.com/john/test/content/download' - - self.assertEqual(url, expectedURL) - except SkyflowError as e: - self.fail() - - def testVerifyParamsPathParamsNotDict(self): - pathParams = {'name': 'john', 'department': ['test'], 'action': 1} - try: - verifyParams({}, pathParams) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, 
SkyflowErrorMessages.INVALID_PATH_PARAM_TYPE.value % ( - str(type('department')), str(type(['str'])))) - - def testVerifyParamsQueryParamsNotDict(self): - queryParams = {'name': 'john', 2: [json], 'action': 1} - try: - verifyParams(queryParams, {}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_QUERY_PARAM_TYPE.value % ( - str(type(2)), str(type(['str'])))) - - def testVerifyParamsInvalidPathParams(self): - pathParams = 'string' - try: - verifyParams({}, pathParams) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_PATH_PARAMS.value) - - def testVerifyParamsInvalidQueryParams(self): - queryParams = 'string' - try: - verifyParams(queryParams, {}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_QUERY_PARAMS.value) - - def testInvokeConnectionFailure(self): - config = Configuration('', '', lambda: 'token') - client = Client(config) - connectionConfig = ConnectionConfig( - 'url', RequestMethod.POST, requestBody=[]) - try: - client.invoke_connection(connectionConfig) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) diff --git a/tests/vault/test_query.py b/tests/vault/test_query.py deleted file mode 100644 index 63f90794..00000000 --- a/tests/vault/test_query.py +++ /dev/null @@ -1,175 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -import unittest -import os -from unittest import mock -import requests -from requests.models import Response -from skyflow.vault._query import getQueryRequestBody, getQueryResponse -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, QueryOptions - -class TestQuery(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - query = "SELECT * FROM pii_fields WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" - self.data = {"query": query} - self.mockRequest = {"records": [query]} - - self.mockResponse = { - "records": [ - { - "fields": { - "card_number": "XXXXXXXXXXXX1111", - "card_pin": "*REDACTED*", - "cvv": "", - "expiration_date": "*REDACTED*", - "expiration_month": "*REDACTED*", - "expiration_year": "*REDACTED*", - "name": "a***te", - "skyflow_id": "3ea3861-x107-40w8-la98-106sp08ea83f", - "ssn": "XXX-XX-6789", - "zip_code": None - }, - "tokens": None - } - ] - } - - self.requestId = '5d5d7e21-c789-9fcc-ba31-2a279d3a28ef' - - self.mockApiError = { - "error": { - "grpc_code": 13, - "http_code": 500, - "message": "ERROR (internal_error): Could not find Notebook Mapping Notebook Name was not found", - "http_status": "Internal Server Error", - "details": [] - } - } - - self.mockFailResponse = { - "error": { - "code": 500, - "description": "ERROR (internal_error): Could not find Notebook Mapping Notebook Name was not found - request id: 5d5d7e21-c789-9fcc-ba31-2a279d3a28ef" - } - } - - self.queryOptions = QueryOptions() - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def 
testGetQueryRequestBodyWithValidBody(self): - body = json.loads(getQueryRequestBody(self.data, self.queryOptions)) - expectedOutput = { - "query": "SELECT * FROM pii_fields WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'", - } - self.assertEqual(body, expectedOutput) - - def testGetQueryRequestBodyNoQuery(self): - invalidData = {"invalidKey": self.data["query"]} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.QUERY_KEY_ERROR.value) - - def testGetQueryRequestBodyInvalidType(self): - invalidData = {"query": ['SELECT * FROM table_name']} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_QUERY_TYPE.value % (str(type(invalidData["query"])))) - - def testGetQueryRequestBodyEmptyBody(self): - invalidData = {"query": ''} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_QUERY.value) - - def testGetQueryValidResponse(self): - response = Response() - response.status_code = 200 - response._content = b'{"key": "value"}' - try: - responseDict = getQueryResponse(response) - self.assertDictEqual(responseDict, {'key': 'value'}) - except SkyflowError as e: - self.fail() - - def testClientInit(self): - config = Configuration( - 'vaultid', 'https://skyflow.com', lambda: 'test') - client = Client(config) - self.assertEqual(client.vaultURL, 'https://skyflow.com') - self.assertEqual(client.vaultID, 'vaultid') - self.assertEqual(client.tokenProvider(), 'test') - - def testGetQueryResponseSuccessInvalidJson(self): - invalid_response = Response() - invalid_response.status_code = 200 - invalid_response._content = b'invalid-json' - try: - getQueryResponse(invalid_response) - self.fail('not failing on invalid json') - except SkyflowError as se: - self.assertEqual(se.code, 200) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'invalid-json') - - def testGetQueryResponseFailInvalidJson(self): - invalid_response = mock.Mock( - spec=requests.Response, - status_code=404, - content=b'error' - ) - invalid_response.raise_for_status.side_effect = requests.exceptions.HTTPError("Not found") - try: - getQueryResponse(invalid_response) - self.fail('Not failing on invalid error json') - except SkyflowError as se: - self.assertEqual(se.code, 404) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'error') - - def testGetQueryResponseFail(self): - response = mock.Mock( - spec=requests.Response, - status_code=500, - content=json.dumps(self.mockApiError).encode('utf-8') - ) - response.headers = {"x-request-id": self.requestId} - response.raise_for_status.side_effect = requests.exceptions.HTTPError("Server Error") - try: - getQueryResponse(response) - self.fail('not throwing exception when error code is 500') - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.SERVER_ERROR.value) - self.assertDictEqual(e.data, self.mockFailResponse) - - def testQueryInvalidToken(self): - config = Configuration('id', 
'url', lambda: 'invalid-token') - try: - Client(config).query({'query': 'SELECT * FROM table_name'}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) diff --git a/tests/vault/test_token_provider_wrapper.py b/tests/vault/test_token_provider_wrapper.py deleted file mode 100644 index 7d78a67a..00000000 --- a/tests/vault/test_token_provider_wrapper.py +++ /dev/null @@ -1,62 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import unittest - -import dotenv -from skyflow.vault._token import tokenProviderWrapper -from skyflow.service_account import generate_bearer_token -from skyflow.errors._skyflow_errors import * - - -class TestTokenProviderWrapper(unittest.TestCase): - - def setUp(self) -> None: - return super().setUp() - - def testInvalidStoredToken(self): - env_values = dotenv.dotenv_values('.env') - - def tokenProvider(): - newerToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return newerToken - - try: - tokenProviderWrapper('invalid', tokenProvider, "Test") - self.fail('Should have thrown invalid jwt error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.JWT_DECODE_ERROR.value) - - def testNoStoredToken(self): - env_values = dotenv.dotenv_values('.env') - self.newToken = '' - - def tokenProvider(): - self.newToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return self.newToken - - try: - newerToken = tokenProviderWrapper('', tokenProvider, "Test") - self.assertEqual(newerToken, self.newToken) - except SkyflowError as e: - self.fail('Should have decoded token') - - def testStoredTokenNotExpired(self): - env_values = dotenv.dotenv_values('.env') - self.newerToken = '' - - def tokenProvider(): - self.newerToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return self.newerToken - - try: - newToken = tokenProviderWrapper( - tokenProvider(), tokenProvider, "Test") - self.assertEqual(newToken, self.newerToken) - except SkyflowError as e: - self.fail('Should have decoded token') diff --git a/tests/vault/test_update.py b/tests/vault/test_update.py deleted file mode 100644 index c6a00ef2..00000000 --- a/tests/vault/test_update.py +++ /dev/null @@ -1,184 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json -import unittest -import os -import asyncio -import warnings - -from dotenv import dotenv_values -from skyflow.vault._client import Client, Configuration -from skyflow.vault._update import sendUpdateRequests, createUpdateResponseBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import UpdateOptions - - -class TestUpdate(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.mocked_futures = [] - self.event_loop = asyncio.new_event_loop() - - def tokenProvider(): - token, _ = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result((json.dumps(response).encode(), statusCode)) - else: - future.set_result((response, statusCode)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testUpdateNoRecords(self): - invalidData = {} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testUpdateInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testUpdateNoIds(self): - invalidData = {"records": [ - {"table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - def testUpdateInvalidIdType(self): - invalidData = {"records": [ - {"id": ["123"], "table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (list)) - - def testUpdateNoTable(self): - invalidData = {"records": [ - {"id": "id"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testUpdateInvalidTableType(self): - invalidData = {"records": [ - {"id": "id1", "table": ["invalid"]}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, 
SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testUpdateNoFields(self): - invalidData = {"records": [ - {"id": "id", "table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FIELDS_KEY_ERROR.value) - - def testUpdateInvalidFieldsType(self): - invalidData = {"records": [ - {"id": "id1", "table": "pii_fields", "fields": "invalid"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % (str)) - - def testUpdateInvalidFieldsType2(self): - invalidData = {"records": [ - {"id": "id1", "table": "pii_fields", "fields": {}}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UPDATE_FIELD_KEY_ERROR.value) - - def testResponseBodySuccess(self): - response = {"skyflow_id": "123", "tokens": {"first_name": "John"}} - mock_response = [{"id": "123", "fields": {"first_name": "John"}}] - self.add_mock_response(response, 200) - print("Seld.mockedFuturs", self.mocked_futures) - res, partial = createUpdateResponseBody(self.mocked_futures) - self.assertEqual(partial, False) - self.assertEqual(res, {"records": mock_response, "errors": []}) - - def testResponseBodyPartialSuccess(self): - success_response = {"skyflow_id": "123", "tokens": {"first_name": "John"}} - mock_success_response = [{"id": "123", "fields": {"first_name": "John"}}] - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(success_response, 200) - self.add_mock_response(error_response, 404) - res, partial = createUpdateResponseBody(self.mocked_futures) - self.assertTrue(partial) - self.assertEqual(res["records"], mock_success_response) - errors = res["errors"] - - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseNotJson(self): - response = "not a valid json".encode() - self.add_mock_response(response, 200, encode=False) - try: - createUpdateResponseBody(self.mocked_futures) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % - response.decode('utf-8')) diff --git a/tests/vault/test_url_encoder.py b/tests/vault/test_url_encoder.py deleted file mode 100644 index 1e4c8443..00000000 --- a/tests/vault/test_url_encoder.py +++ /dev/null @@ -1,115 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import platform -import sys -import unittest -from unittest import mock -from skyflow._utils import http_build_query, getMetrics -from skyflow.version import SDK_VERSION - -class TestUrlEncoder(unittest.TestCase): - def setUp(self) -> None: - return super().setUp() - - def test_encoder_simple(self): - data = { - "key": "value" - } - - http_data = http_build_query(data) - self.assertEqual(http_data, "key=value") - - def test_encoder_multiplekeys(self): - data = { - "key": "value", - "key2": "value2" - } - - http_data = http_build_query(data) - self.assertEqual(http_data, "key=value&key2=value2") - - def test_encoder_nested(self): - data = { - "key": "value", - "nested": { - "key": "value" - } - } - - http_data = http_build_query(data) - - self.assertEqual(http_data, "key=value&nested%5Bkey%5D=value") - - def test_encoder_array(self): - data = { - "key": "value", - "nested": { - "array": ["one", "two"], - "key": "value" - } - } - http_data = http_build_query(data) - - self.assertEqual( - http_data, "key=value&nested%5Barray%5D%5B0%5D=one&nested%5Barray%5D%5B1%5D=two&nested%5Bkey%5D=value") - - # Test Case 1: Success case - def test_get_metrics(self): - expected = { - 'sdk_name_version': "skyflow-python@" + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version, - } - actual = getMetrics() - self.assertEqual(actual, expected) - - @mock.patch('platform.node', return_value='') - def test_getMetrics_no_device_model(self, mock_node): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': '', - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - - actual_output = getMetrics() - expected_output['sdk_client_device_model'] = '' - self.assertEqual(actual_output, expected_output) - - @mock.patch('platform.node', return_value='Mocked Device Model') - def test_getMetrics_with_device_model(self, mock_node): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': 'Mocked Device Model', - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output) - - @mock.patch('sys.platform', return_value='mocked_os') - def test_getMetrics_with_os_details(self, mock_platform): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output) - - def test_getMetrics_with_runtime_details(self): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': 'Python ' + 'mocked_version' - } - - with mock.patch('sys.version', 'mocked_version'), \ - mock.patch('sys.version_info', new=(3, 11, 2)): - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output) From 22d1962bfd2b1d43309ae6e70a34d22c645b1e16 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 9 Jan 2025 13:48:38 +0530 Subject: [PATCH 02/60] SK-1772: Update beta release workflow --- .github/workflows/beta-release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/beta-release.yml 
b/.github/workflows/beta-release.yml index 8fab88eb..af21bb4f 100644 --- a/.github/workflows/beta-release.yml +++ b/.github/workflows/beta-release.yml @@ -13,6 +13,6 @@ jobs: build-and-deploy: uses: ./.github/workflows/shared-build-and-deploy.yml with: - ref: main - pypi-token: ${{ secrets.PYPI_PUBLISH_TOKEN }} + ref: ${{ github.ref_name }} is-internal: false + secrets: inherit From 31773fc3689235b51c06a5fcdc770b38cb602a8d Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 9 Jan 2025 13:54:30 +0530 Subject: [PATCH 03/60] SK-1772: Update beta release workflow --- .github/workflows/shared-build-and-deploy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 9d349fbb..55e302c8 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -59,7 +59,7 @@ jobs: else COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin + git push origin ${{ github.ref_name }} -f fi - name: Build and Publish Package From a80205c6955c8d5e2a0a05e91e73fe2d7d71c348 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 10 Jan 2025 13:04:09 +0530 Subject: [PATCH 04/60] SK-1772: Updated beta release workflow file --- .github/workflows/shared-build-and-deploy.yml | 29 +++++-------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 55e302c8..0e55c7fa 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -17,10 +17,15 @@ jobs: build-and-deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - name: checkout branch with: - ref: ${{ inputs.ref }} fetch-depth: 0 + uses: actions/checkout@v1 + - name: Get Branch + run: | + BRANCH_NAME=$(git branch -r --contains ${{ github.sha }} | grep -o 'origin/.*' | sed 's|origin/||' | head -n 1) + echo "Branch Name: $BRANCH_NAME" + echo "branch_name=$BRANCH_NAME" >> $GITHUB_ENV - uses: actions/setup-python@v2 @@ -59,26 +64,8 @@ jobs: else COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin ${{ github.ref_name }} -f + git push origin $branch_name -f fi - - name: Build and Publish Package - if: ${{ !inputs.is-internal }} - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH_TOKEN }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* - - - name: Build and Publish to JFrog Artifactory - if: ${{ inputs.is-internal }} - env: - TWINE_USERNAME: ${{ secrets.JFROG_USERNAME }} - TWINE_PASSWORD: ${{ secrets.JFROG_PASSWORD }} - run: | - python setup.py sdist bdist_wheel - twine upload --repository-url https://prekarilabs.jfrog.io/artifactory/api/pypi/skyflow-python/ dist/* - \ No newline at end of file From 073e7993c5a3490eb559e4f43cf893d093210a63 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 10 Jan 2025 14:07:18 +0530 Subject: [PATCH 05/60] SK-1772: Updated beta release workflow file --- .github/workflows/shared-build-and-deploy.yml | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 0e55c7fa..716fe6f1 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ 
b/.github/workflows/shared-build-and-deploy.yml @@ -17,11 +17,12 @@ jobs: build-and-deploy: runs-on: ubuntu-latest steps: - - name: checkout branch + - name: Checkout branch + uses: actions/checkout@v1 with: fetch-depth: 0 - uses: actions/checkout@v1 - - name: Get Branch + + - name: Get Branch Name run: | BRANCH_NAME=$(git branch -r --contains ${{ github.sha }} | grep -o 'origin/.*' | sed 's|origin/||' | head -n 1) echo "Branch Name: $BRANCH_NAME" @@ -29,12 +30,12 @@ jobs: - uses: actions/setup-python@v2 - - name: Install dependencies + - name: Install Dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel twine - - name: Get Previous tag + - name: Get Previous Tag id: previoustag uses: WyriHaximus/github-action-get-previous-tag@v1 with: @@ -49,7 +50,7 @@ jobs: ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" fi - - name: Commit changes + - name: Commit Changes run: | git config user.name "${{ github.actor }}" git config user.email "${{ github.actor }}@users.noreply.github.com" @@ -60,12 +61,9 @@ jobs: VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" git commit -m "$COMMIT_MESSAGE" - git push origin ${{ github.ref_name }} -f + git push origin HEAD:${{ env.branch_name }} --force else COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin $branch_name -f + git push origin HEAD:${{ env.branch_name }} --force fi - - - \ No newline at end of file From d7f96a01f5c490a9402034f8ab3290601c903eae Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 10 Jan 2025 14:13:29 +0530 Subject: [PATCH 06/60] SK-1772: Updated beta release workflow file --- .github/workflows/shared-build-and-deploy.yml | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 716fe6f1..58f84d1b 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -17,25 +17,24 @@ jobs: build-and-deploy: runs-on: ubuntu-latest steps: - - name: Checkout branch - uses: actions/checkout@v1 + - name: checkout branch with: fetch-depth: 0 - - - name: Get Branch Name + uses: actions/checkout@v1 + - name: Get Branch run: | BRANCH_NAME=$(git branch -r --contains ${{ github.sha }} | grep -o 'origin/.*' | sed 's|origin/||' | head -n 1) echo "Branch Name: $BRANCH_NAME" echo "branch_name=$BRANCH_NAME" >> $GITHUB_ENV + git checkout $branch_name - uses: actions/setup-python@v2 - - - name: Install Dependencies + - name: Install dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel twine - - name: Get Previous Tag + - name: Get Previous tag id: previoustag uses: WyriHaximus/github-action-get-previous-tag@v1 with: @@ -50,7 +49,7 @@ jobs: ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" fi - - name: Commit Changes + - name: Commit changes run: | git config user.name "${{ github.actor }}" git config user.email "${{ github.actor }}@users.noreply.github.com" @@ -61,9 +60,10 @@ jobs: VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" git commit -m "$COMMIT_MESSAGE" - git push origin HEAD:${{ env.branch_name }} --force + git push origin ${{ github.ref_name }} -f else COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ 
steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin HEAD:${{ env.branch_name }} --force + git push origin ${{ env.branch_name }} -f fi + \ No newline at end of file From 718e08d157f6b214cb5db816d4f6ad76b753eb72 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 10 Jan 2025 14:19:55 +0530 Subject: [PATCH 07/60] SK-1772: Updated beta release workflow file --- .github/workflows/shared-build-and-deploy.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 58f84d1b..679851dd 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -64,6 +64,7 @@ jobs: else COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin ${{ env.branch_name }} -f + git push origin HEAD:refs/heads/${{ env.branch_name }} -f + fi \ No newline at end of file From 41b1c8c2d7c660191304e80bedad67cf66dac169 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 10 Jan 2025 15:20:07 +0530 Subject: [PATCH 08/60] SK-1772: Refactored workflows --- .github/workflows/beta-release.yml | 2 +- .github/workflows/internal-release.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/shared-build-and-deploy.yml | 47 ++++++++++++------- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 6 files changed, 34 insertions(+), 23 deletions(-) diff --git a/.github/workflows/beta-release.yml b/.github/workflows/beta-release.yml index af21bb4f..7ad03858 100644 --- a/.github/workflows/beta-release.yml +++ b/.github/workflows/beta-release.yml @@ -14,5 +14,5 @@ jobs: uses: ./.github/workflows/shared-build-and-deploy.yml with: ref: ${{ github.ref_name }} - is-internal: false + tag: 'beta' secrets: inherit diff --git a/.github/workflows/internal-release.yml b/.github/workflows/internal-release.yml index d4ad9400..2e273096 100644 --- a/.github/workflows/internal-release.yml +++ b/.github/workflows/internal-release.yml @@ -18,6 +18,6 @@ jobs: uses: ./.github/workflows/shared-build-and-deploy.yml with: ref: ${{ github.ref_name }} - is-internal: true + tag: 'internal' secrets: inherit \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 53894cd6..d062daf4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,5 +14,5 @@ jobs: uses: ./.github/workflows/shared-build-and-deploy.yml with: ref: main - is-internal: false + tag: 'public' secrets: inherit diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 679851dd..7d94b14f 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -7,26 +7,19 @@ on: description: 'Git reference to use (e.g., main or branch name)' required: true type: string - - is-internal: - description: 'Flag for internal release' + + tag: + description: 'Release Tag' required: true - type: boolean + type: string jobs: build-and-deploy: runs-on: ubuntu-latest steps: - - name: checkout branch + - uses: actions/checkout@v2 with: fetch-depth: 0 - uses: actions/checkout@v1 - - name: Get Branch - run: | - BRANCH_NAME=$(git branch -r --contains ${{ github.sha }} | grep -o 'origin/.*' | sed 's|origin/||' | head -n 1) - echo "Branch Name: $BRANCH_NAME" - echo "branch_name=$BRANCH_NAME" >> $GITHUB_ENV - git checkout $branch_name - uses: 
actions/setup-python@v2 - name: Install dependencies @@ -43,28 +36,46 @@ jobs: - name: Bump Version run: | chmod +x ./ci-scripts/bump_version.sh - if ${{ inputs.is-internal }}; then + if ${{ inputs.tag == 'internal' }}; then ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" "$(git rev-parse --short "$GITHUB_SHA")" else ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" fi - name: Commit changes + if: ${{ inputs.tag == 'internal' || inputs.tag == 'public' }} run: | git config user.name "${{ github.actor }}" git config user.email "${{ github.actor }}@users.noreply.github.com" git add setup.py git add skyflow/utils/_version.py - if [ "${{ inputs.is-internal }}" = "true" ]; then + if [ "${{ inputs.tag }}" = "internal" ]; then VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" git commit -m "$COMMIT_MESSAGE" git push origin ${{ github.ref_name }} -f - else + fi + if [ "${{ inputs.tag }}" = "public" ]; then COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin HEAD:refs/heads/${{ env.branch_name }} -f - + git push origin fi - \ No newline at end of file + + - name: Build and Publish Package + if: ${{ inputs.tag == 'beta' || inputs.tag == 'public' }} + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + + - name: Build and Publish to JFrog Artifactory + if: ${{ inputs.tag == 'internal' }} + env: + TWINE_USERNAME: ${{ secrets.JFROG_USERNAME }} + TWINE_PASSWORD: ${{ secrets.JFROG_PASSWORD }} + run: | + python setup.py sdist bdist_wheel + twine upload --repository-url https://prekarilabs.jfrog.io/artifactory/api/pypi/skyflow-python/ dist/* \ No newline at end of file diff --git a/setup.py b/setup.py index 650c3de6..55c45d4f 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.1.dev0+9eff324' +current_version = '2.0.0b1' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 112e619d..64d4c6b5 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.1.dev0+9eff324' \ No newline at end of file +SDK_VERSION = '2.0.0b1' \ No newline at end of file From 09f524d1eed2248e7524f6d2eab3ae0f84360893 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Thu, 16 Jan 2025 16:59:30 +0530 Subject: [PATCH 09/60] SK-1821: Refactor release pipelines (#153) * SK-1821: Refactor release pipelines --- .github/workflows/shared-build-and-deploy.yml | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 7d94b14f..1b0309dd 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -27,6 +27,22 @@ jobs: python -m pip install --upgrade pip pip install setuptools wheel twine + - name: Resolve Branch for the Tagged Commit + id: resolve-branch + if: ${{ inputs.tag == 'beta' || inputs.tag == 'public' }} + run: | + TAG_COMMIT=$(git rev-list -n 1 ${{ github.ref_name }}) + + BRANCH_NAME=$(git branch -r --contains $TAG_COMMIT | grep -o 'origin/.*' | sed 's|origin/||' | head -n 1) + + if [ -z 
"$BRANCH_NAME" ]; then + echo "Error: Could not resolve branch for the tag." + exit 1 + fi + + echo "Resolved Branch Name: $BRANCH_NAME" + echo "branch_name=$BRANCH_NAME" >> $GITHUB_ENV + - name: Get Previous tag id: previoustag uses: WyriHaximus/github-action-get-previous-tag@v1 @@ -43,23 +59,27 @@ jobs: fi - name: Commit changes - if: ${{ inputs.tag == 'internal' || inputs.tag == 'public' }} run: | git config user.name "${{ github.actor }}" git config user.email "${{ github.actor }}@users.noreply.github.com" + + if [[ "${{ inputs.tag }}" == "beta" || "${{ inputs.tag }}" == "public" ]]; then + git checkout ${{ env.branch_name }} + fi + git add setup.py git add skyflow/utils/_version.py - if [ "${{ inputs.tag }}" = "internal" ]; then + if [[ "${{ inputs.tag }}" == "internal" ]]; then VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" git commit -m "$COMMIT_MESSAGE" git push origin ${{ github.ref_name }} -f fi - if [ "${{ inputs.tag }}" = "public" ]; then + if [[ "${{ inputs.tag }}" == "beta" || "${{ inputs.tag }}" == "public" ]]; then COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" git commit -m "$COMMIT_MESSAGE" - git push origin + git push origin ${{ env.branch_name }} fi - name: Build and Publish Package From e923868a5f36647248098de6f516475a7ece8b54 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Fri, 7 Feb 2025 12:55:18 +0530 Subject: [PATCH 10/60] SK-1863: Added a migration guide section in the README for transitioning from v1 to v2. (#155) * SK-1863: Added migration from v1 to v2 section in readme --- README.md | 244 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 244 insertions(+) diff --git a/README.md b/README.md index bfdab7a3..f3d07545 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ This Python SDK is designed to help developers easily implement Skyflow into the - [Requirements](#requirements) - [Configuration](#configuration) - [Service Account Bearer Token Generation](#service-account-bearer-token-generation) + - [Migration from v1 to v2](#migrate-from-v1-to-v2) - [Vault APIs](#vault-apis) - [Insert data into the vault](#insert-data-into-the-vault) - [Detokenize](#detokenize) @@ -328,6 +329,249 @@ try: except SkyflowError as e: print(e) ``` +## Migrate from V1 to V2 + +Below are the steps to migrate the Python SDK from V1 to V2. + +### 1. Authentication Options + +In V2, we have introduced multiple authentication options. +You can now provide credentials in the following ways: + +- **API Key (Recommended)** +- **Environment Variable** (`SKYFLOW_CREDENTIALS`) (**Recommended**) +- **Path to your credentials JSON file** +- **Stringified JSON of your credentials** +- **Bearer token** + +These options allow you to choose the authentication method that best suits your use case. + +#### V1 (Old): +Passing the token provider function below as a parameter to the Configuration. 
+
+```python
+# User defined function to provide access token to the vault apis
+def token_provider():
+    global bearerToken
+    if not is_expired(bearerToken):
+        return bearerToken
+    bearerToken, _ = generate_bearer_token('')
+    return bearerToken
+```
+
+#### V2 (New):
+Passing one of the following:
+
+```python
+# Option 1: API Key (Recommended)
+credentials = {
+    'api_key': '', # API key
+}
+
+# Option 2: Environment Variables (Recommended)
+# Set SKYFLOW_CREDENTIALS in your environment
+
+# Option 3: Credentials File
+credentials = {
+    'path': '', # Path to credentials file
+}
+
+# Option 4: Stringified JSON
+credentials = {
+    'credentials_string': '', # Credentials as string
+}
+
+# Option 5: Bearer Token
+credentials = {
+    'token': '', # Bearer token
+}
+```
+
+**Notes:**
+- Use only ONE authentication method.
+- API Key or Environment Variables are recommended for production use.
+- Secure storage of credentials is essential.
+- For overriding behavior and priority order of credentials, please refer to the README.
+
+### 2. Client Initialization
+
+In V2, we have introduced a Builder design pattern for client initialization and added support for multi-vault. This allows you to configure multiple vaults during client initialization.
+
+In V2, the log level is tied to each individual client instance.
+
+During client initialization, you can pass the following parameters:
+
+- **`vault_id`** and **`cluster_id`**: These values are derived from the vault ID & vault URL.
+- **`env`**: Specify the environment (e.g., SANDBOX or PROD).
+- **`credentials`**: The necessary authentication credentials.
+
+#### V1 (Old):
+
+```python
+# Initializing a Skyflow Client instance with a SkyflowConfiguration object
+config = Configuration('', '', token_provider)
+client = Client(config)
+```
+
+#### V2 (New):
+
+```python
+# Initializing a Skyflow Client instance
+client = (
+    Skyflow.builder()
+    .add_vault_config({
+        'vault_id': '', # Primary vault
+        'cluster_id': '', # ID from your vault URL e.g., https://{clusterId}.vault.skyflowapis.com
+        'env': Env.PROD, # Env by default it is set to PROD
+        'credentials': credentials # Individual credentials
+    })
+    .add_skyflow_credentials(credentials) # Skyflow credentials will be used if no individual credentials are passed
+    .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR
+    .build()
+)
+```
+
+**Key Changes:**
+- `vault_url` replaced with `cluster_id`.
+- Added environment specification (`env`).
+- Instance-specific log levels.
+
+### 3. Request & Response Structure
+
+In V2, with the introduction of constructor parameters, you can now pass parameters to `InsertRequest`. This request needs:
+- **`table_name`**: The name of the table.
+- **`values`**: An array of objects containing the data to be inserted.
+The response will be of type `InsertResponse` class, which contains `inserted_fields` and errors.
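+
+A minimal sketch of consuming this response, assuming the `client` initialized above and an `insert_request` like the one in the V2 example below:
+
+```python
+from skyflow.error import SkyflowError
+
+try:
+    # '<VAULT_ID>' is a placeholder for the vault configured on the client.
+    response = client.vault('<VAULT_ID>').insert(insert_request)
+    # Each entry holds the tokenized fields and skyflow_id of one inserted record.
+    for record in response.inserted_fields:
+        print(record)
+except SkyflowError as e:
+    # Carries the enriched fields described in the error structure section below.
+    print(e)
+```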
+
+#### V1 (Old): Request Building
+
+```python
+client.insert(
+    {
+        "records": [
+            {
+                "table": "cards",
+                "fields": {
+                    "cardNumber": "41111111111",
+                    "cvv": "123",
+                },
+            }
+        ]
+    },
+    InsertOptions(True),
+)
+```
+
+#### V2 (New): Request Building
+
+```python
+# Prepare Insertion Data
+insert_data = [
+    {
+        'card_number': '',
+        'cvv': '',
+    },
+]
+
+table_name = '' # Replace with your actual table name
+
+# Create Insert Request
+insert_request = InsertRequest(
+    table_name=table_name,
+    values=insert_data,
+    return_tokens=True, # Optional: Get tokens for inserted data
+    continue_on_error=True # Optional: Continue on partial errors
+)
+
+# Perform Secure Insertion
+response = skyflow_client.vault(primary_vault_config.get('')).insert(insert_request)
+```
+
+#### V1 (Old): Response Structure
+
+```json
+{
+    "records": [
+        {
+            "table": "cards",
+            "fields": {
+                "cardNumber": "f3907186-e7e2-466f-91e5-48e12c2bcbc1",
+                "cvv": "1989cb56-63da-4482-a2df-1f74cd0dd1a5",
+                "skyflow_id": "d863633c-8c75-44fc-b2ed-2b58162d1117"
+            },
+            "request_index": 0
+        }
+    ]
+}
+```
+
+#### V2 (New): Response Structure
+
+```python
+InsertResponse(
+    inserted_fields=[
+        {
+            'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097',
+            'card_number': '5479-4229-4622-1393'
+        }
+    ],
+    errors=[]
+)
+```
+
+### 4. Request Options
+
+In V2, we have introduced constructor parameters, allowing you to set options as key-value pairs as parameters in the request.
+
+#### V1 (Old):
+
+```python
+options = InsertOptions(
+    tokens = True
+)
+```
+
+#### V2 (New):
+
+```python
+insert_request = InsertRequest(
+    table_name=table_name,
+    values=insert_data,
+    return_tokens=True, # Optional: Get tokens for inserted data
+    continue_on_error=True # Optional: Continue on partial errors
+)
+```
+
+### 5. Error Structure
+
+In V2, we have enriched the error details to provide better debugging capabilities.
+The error response now includes:
+- **http_status**: The HTTP status code.
+- **grpc_code**: The gRPC code associated with the error.
+- **details & message**: A detailed description of the error.
+- **request_id**: A unique request identifier for easier debugging.
+
+#### V1 (Old) Error Structure:
+
+```json
+{
+    "code": "",
+    "message": ""
+}
+```
+
+#### V2 (New) Error Structure:
+
+```json
+{
+    "http_status": "",
+    "grpc_code": "",
+    "http_code": "",
+    "message": "",
+    "request_id": "",
+    "details": [ "
" ] +} +``` ## Vault APIs From aca63e1be5e22a960197a8de12dcf70351b3ab9d Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Tue, 18 Feb 2025 10:40:22 +0530 Subject: [PATCH 11/60] SK-1894: Update README for Python SDK V2 (#157) * SK-1894: Update readme --- README.md | 2193 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 1476 insertions(+), 717 deletions(-) diff --git a/README.md b/README.md index f3d07545..af1e79f4 100644 --- a/README.md +++ b/README.md @@ -1,49 +1,61 @@ -# Skyflow-python +# Skyflow Python ---- - -## Description - -This Python SDK is designed to help developers easily implement Skyflow into their python backend. +The Skyflow Python SDK is designed to help with integrating Skyflow into a Python backend. ## Table of Contents -- [Skyflow-python](#skyflow-python) - - [Description](#description) - - [Table of Contents](#table-of-contents) - - [Features](#features) - - [Installation](#installation) +- [Table of Contents](#table-of-contents) +- [Overview](#overview) +- [Install](#installation) - [Requirements](#requirements) - [Configuration](#configuration) - - [Service Account Bearer Token Generation](#service-account-bearer-token-generation) - - [Migration from v1 to v2](#migrate-from-v1-to-v2) - - [Vault APIs](#vault-apis) +- [Migration from v1 to v2](#migration-from-v1-to-v2) + - [Authentication options](#1-authentication-options) + - [Initializing the client](#2-initializing-the-client) + - [Request & response structure](#3-request--response-structure) + - [Request options](#4-request-options) + - [Error structure](#5-error-structure) +- [Quickstart](#quickstart) + - [Authenticate](#authenticate) + - [Initialize the client](#initialize-the-client) + - [Insert data into the vault](#insert-data-into-the-vault) +- [Vault](#vault-apis) - [Insert data into the vault](#insert-data-into-the-vault) - [Detokenize](#detokenize) - [Tokenize](#tokenize) - [Get](#get) - - [Get By Id](#get-by-id) - - [Redaction Types](#redaction-types) + - [Get by skyflow IDs](#get-by-skyflow-ids) + - [Get tokens](#get-tokens) + - [Get by column name and column values](#get-by-column-name-and-column-values) + - [Redaction types](#redaction-types) - [Update](#update) - [Delete](#delete) - [Invoke Connection](#invoke-connection) - [Query](#query) - - [Logging](#logging) - - [Reporting a Vulnerability](#reporting-a-vulnerability) +- [Connections](#connections) + - [Invoke a connection](#invoke-a-connection) +- [Authenticate with bearer tokens](#authenticate-with-bearer-tokens) + - [Generate a bearer token](#generate-a-bearer-token) + - [Generate bearer tokens with context](#generate-bearer-tokens-with-context) + - [Generate scoped bearer tokens](#generate-scoped-bearer-tokens) + - [Generate signed data tokens](#generate-signed-data-tokens) +- [Logging](#logging) +- [Reporting a Vulnerability](#reporting-a-vulnerability) + +## Overview -## Features +- Authenticate using a Skyflow service account and generate bearer tokens for secure access. -Authentication with a Skyflow Service Account and generation of a bearer token +- Perform Vault API operations such as inserting, retrieving, and tokenizing sensitive data with ease. -Vault API operations to insert, retrieve and tokenize sensitive data +- Invoke connections to third-party APIs without directly handling sensitive data, ensuring compliance and data protection. 
-Invoking connections to call downstream third party APIs without directly handling sensitive data ## Installation ### Requirements -- Python 3.8.0 and above +- Python 3.8.0 and above (tested with Python 3.8.0) ### Configuration @@ -52,302 +64,24 @@ The package can be installed using pip: ```bash pip install skyflow ``` - -## Service Account Bearer Token Generation - -The [Service Account](https://github.com/skyflowapi/skyflow-python/tree/main/skyflow/service_account) python module is used to generate service account tokens from service account credentials file which is downloaded upon creation of service account. The token generated from this module is valid for 60 minutes and can be used to make API calls to vault services as well as management API(s) based on the permissions of the service account. - -The `generate_bearer_token(filepath)` function takes the credentials file path for token generation, alternatively, you can also send the entire credentials as string, by using `generate_bearer_token_from_creds(credentials)` - -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -def token_provider(): - global bearer_token - global token_type - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_toke('') - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -def token_provider(): - global bearer_token - global token_type - # As an example - skyflow_credentials = { - 'clientID': '', - 'clientName': '', - 'tokenURI': '', - 'keyID': '', - 'privateKey': '', - } - credentials_string = json.dumps(skyflow_credentials) - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -## Service Account Scoped Token Generation - -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] -} -def token_provider(): - global bearer_token - global token_type - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token('', options) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -[Example using credentials 
string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] -} -def token_provider(): - global bearer_token - global token_type - # As an example - skyflow_credentials = { - 'clientID': '', - 'clientName': '', - 'tokenURI': '', - 'keyID': '', - 'privateKey': '', - } - credentials_string = json.dumps(skyflow_credentials) - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string, options) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -## Service Account Token Generation With Context - -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'ctx': "" -} -def token_provider(): - global bearer_token - global token_type - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token('', options) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'ctx': '' -} -def token_provider(): - global bearer_token - global token_type - # As an example - skyflow_credentials = { - 'clientID': '', - 'clientName': '', - 'tokenURI': '', - 'keyID': '', - 'privateKey': '', - } - credentials_string = json.dumps(skyflow_credentials) - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string, options) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -## Service Account Signed Token Generation - -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'ctx': 'CONTEX_ID', - 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], - 'time_to_live': 90 # in seconds -} -def token_provider(): - global bearer_token - global token_type - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token('', options) - return bearer_token, token_type - -try: - 
bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) - -``` - -[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): - -```python -from skyflow.error import SkyflowError -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired - -# cache token for reuse -bearer_token = '' -token_type = '' -options = { - 'ctx': 'CONTEX_ID', - 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], - 'time_to_live': 90 # in seconds -} -def token_provider(): - global bearer_token - global token_type - # As an example - skyflow_credentials = { - 'clientID': '', - 'clientName': '', - 'tokenURI': '', - 'keyID': '', - 'privateKey': '', - } - credentials_string = json.dumps(skyflow_credentials) - - if is_expired(bearer_token): - bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string, options) - return bearer_token, token_type - -try: - bearer_token, token_type = token_provider() - print('Access Token:', bearer_token) - print('Type of token:', token_type) -except SkyflowError as e: - print(e) -``` -## Migrate from V1 to V2 +## Migration from V1 to V2 Below are the steps to migrate the Python SDK from V1 to V2. -### 1. Authentication Options +### Authentication Options In V2, we have introduced multiple authentication options. You can now provide credentials in the following ways: -- **API Key (Recommended)** -- **Environment Variable** (`SKYFLOW_CREDENTIALS`) (**Recommended**) +- **Passing credentials in ENV.** (`SKYFLOW_CREDENTIALS`) (**Recommended**) +- **API Key** - **Path to your credentials JSON file** - **Stringified JSON of your credentials** - **Bearer token** These options allow you to choose the authentication method that best suits your use case. -#### V1 (Old): -Passing the token provider function below as a parameter to the Configuration. +#### V1 (Old): Passing the token provider function below as a parameter to the Configuration. ```python # User defined function to provide access token to the vault apis @@ -359,8 +93,7 @@ def token_provider(): return bearerToken ``` -#### V2 (New): -Passing one of the following: +#### V2 (New): Passing one of the following: ```python # Option 1: API Key (Recommended) @@ -391,9 +124,9 @@ credentials = { - Use only ONE authentication method. - API Key or Environment Variables are recommended for production use. - Secure storage of credentials is essential. -- For overriding behavior and priority order of credentials, please refer to the README. +- For overriding behavior and priority order of credentials, please refer to [Initialize the client](#initialize-the-client) section in [Quickstart](#quickstart). -### 2. Client Initialization +### Initializing the client In V2, we have introduced a Builder design pattern for client initialization and added support for multi-vault. This allows you to configure multiple vaults during client initialization. @@ -409,7 +142,7 @@ During client initialization, you can pass the following parameters: ```python # Initializing a Skyflow Client instance with a SkyflowConfiguration object -config = Configuration('', '', token_provider) +config = Configuration('', '', token_provider) client = Client(config) ``` @@ -436,7 +169,7 @@ client = ( - Added environment specification (`env`). - Instance-specific log levels. -### 3. 
Request & Response Structure +### Request & Response Structure In V2, with the introduction of constructor parameters, you can now pass parameters to `InsertRequest`. This request need - **`table_name`**: The name of the table. @@ -519,7 +252,7 @@ InsertResponse( ) ``` -### 4. Request Options +### Request Options In V2, we have introduced constructor parameters, allowing you to set options as key-value pairs as parameters in request. @@ -535,20 +268,23 @@ options = InsertOptions( ```python insert_request = InsertRequest( - table_name=table_name, + table_name=table_name, # Replace with the table name values=insert_data, - return_tokens=True, # Optional: Get tokens for inserted data - continue_on_error=True # Optional: Continue on partial errors + return_tokens=False, # Do not return tokens + continue_on_error=False, # Stop inserting if any record fails + upsert='', # Replace with the column name used for upsert logic + token_mode=TokenMode.DISABLE, # Disable BYOT + tokens='' # Replace with tokens when TokenMode is ENABLE. ) ``` -### 5. Request Options +### Error Structure In V2, we have enriched the error details to provide better debugging capabilities. The error response now includes: - **http_status**: The HTTP status code. - **grpc_code**: The gRPC code associated with the error. -- **details & message**: A detailed description of the error. +- **details** & **message**: A detailed description of the error. - **request_id**: A unique request identifier for easier debugging. #### V1 (Old) Error Structure: @@ -562,119 +298,201 @@ The error response now includes: #### V2 (New) Error Structure: -```json +```typescript { "http_status": "", - "grpc_code": "", - "http_code": "", + "grpc_code": , + "http_code": , "message": "", - "request_id": "", + "request_id": "", "details": [ "
" ] } ``` -## Vault APIs +## Quickstart +Get started quickly with the essential steps: authenticate, initialize the client, and perform a basic vault operation. This section provides a minimal setup to help you integrate the SDK efficiently. -The vault python module is used to perform operations on the vault such as inserting records, detokenizing tokens, retrieving tokens for a skyflow_id and to invoke a connection. - -To use this module, the skyflow client must first be initialized as follows. +### Authenticate +You can use an API key to authenticate and authorize requests to an API. For authenticating via bearer tokens and different supported bearer token types, refer to the [Authenticate with bearer tokens](#authenticate-with-bearer-tokens) section. ```python -from skyflow import Env -from skyflow import Skyflow, LogLevel - -# To generate Bearer Token from credentials string. -skyflow_credentials = { - 'clientID': '', - 'clientName': '', - 'tokenURI': '', - 'keyID': '', - 'privateKey': '', - } -credentials_string = json.dumps(skyflow_credentials) - -# Pass one of api_key, token, credentials_string & path as credentials +# create a new credentials dictionary credentials = { - 'token': 'BEARER_TOKEN', # bearer token - # api_key: "API_KEY", # API_KEY - # path: "PATH", # path to credentials file - # credentials_string: credentials_string, # credentials as string + api_key: "", # add your API key in credentials } - -client = ( - Skyflow.builder() - .add_vault_config({ - 'vault_id': 'VAULT_ID', # primary vault - 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com - 'env': Env.PROD, # Env by default it is set to PROD - 'credentials': credentials # individual credentials - }) - .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed - .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR - .build() -) ``` -Notes: +### Initialize the client -- If both Skyflow common credentials and individual credentials at the configuration level are provided, the individual credentials at the configuration level will take priority. +To get started, you must first initialize the skyflow client. While initializing the skyflow client, you can specify different types of credentials. +**1. API keys** +- A unique identifier used to authenticate and authorize requests to an API. -All Vault APIs must be invoked using a client instance. +**2. Bearer tokens** +- A temporary access token used to authenticate API requests, typically included in the +Authorization header. -### Insert data into the vault +**3. Service account credentials file path** +- The file path pointing to a JSON file containing credentials for a service account, used +for secure API access. -To insert data into your vault, use the `insert` method. The `InsertRequest` class is used to create an insert request, which contains the values to be inserted in the form of a dictionary of records. Additionally, you can provide options in the insert request, such as returning tokenized data, upserting records, and continuing on error. +**4. Service account credentials string** +- A JSON-formatted string containing service account credentials, often used as an alternative to a file for programmatic authentication. -Insert call schema +Note: Only one type of credential can be used at a time. 
```python -#Initialize Client -from skyflow.error import SkyflowError -from skyflow.vault.data import InsertRequest +import json +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow import Env -try: - insert_data = [ - {'': ''}, - {'': ''} - ] +""" +Example program to initialize the Skyflow client with various configurations. +The Skyflow client facilitates secure interactions with the Skyflow vault, +such as securely managing sensitive data. +""" + +# Step 1: Define the primary credentials for authentication. +# Note: Only one type of credential can be used at a time. You can choose between: +# - API key +# - Bearer token +# - A credentials string (JSON-formatted) +# - A file path to a credentials file. + +# Initialize primary credentials using a Bearer token for authentication. +primary_credentials = { + 'token': '' # Replace with your actual authentication token. +} +# Step 2: Configure the primary vault details. +# VaultConfig stores all necessary details to connect to a specific Skyflow vault. +primary_vault_config = { + 'vault_id': '', # Replace with your primary vault's ID. + 'cluster_id': '', # Replace with the cluster ID (part of the vault URL, e.g., https://{clusterId}.vault.skyflowapis.com). + 'env': Env.PROD, # Set the environment (PROD, SANDBOX, STAGE, DEV). + 'credentials': primary_credentials # Attach the primary credentials to this vault configuration. +} - insert_request = InsertRequest( - table_name = '', - values = insert_data, - ) +# Step 3: Create credentials as a JSON object (if a Bearer Token is not provided). +# Demonstrates an alternate approach to authenticate with Skyflow using a credentials object. +skyflow_credentials = { + 'clientID': '', # Replace with your Client ID. + 'clientName': '', # Replace with your Client Name. + 'tokenURI': '', # Replace with the Token URI. + 'keyID': '', # Replace with your Key ID. + 'privateKey': '' # Replace with your Private Key. +} + +# Step 4: Convert the JSON object to a string and use it as credentials. +# This approach allows the use of dynamically generated or pre-configured credentials. +credentials_string = json.dumps(skyflow_credentials) # Converts JSON object to string for use as credentials. + +# Step 5: Define secondary credentials (API key-based authentication as an example). +# Demonstrates a different type of authentication mechanism for Skyflow vaults. +secondary_credentials = { + 'token': '' # Replace with your API Key for authentication. +} + +# Step 6: Configure the secondary vault details. +# A secondary vault configuration can be used for operations involving multiple vaults. +secondary_vault_config = { + 'vault_id': '', # Replace with your secondary vault's ID. + 'cluster_id': '', # Replace with the corresponding cluster ID. + 'env': Env.PROD, # Set the environment for this vault. + 'credentials': secondary_credentials # Attach the secondary credentials to this configuration. +} + +# Step 7: Define tertiary credentials using a path to a credentials JSON file. +# This method demonstrates an alternative authentication method. +tertiary_credentials = { + 'token': '' # Replace with the path to your credentials file. +} + +# Step 8: Configure the tertiary vault details. +tertiary_vault_config = { + 'vault_id': '', # Replace with the tertiary vault ID. + 'cluster_id': '', # Replace with the corresponding cluster ID. + 'env': Env.PROD, # Set the environment for this vault. + 'credentials': tertiary_credentials # Attach the tertiary credentials. 
+} + +# Step 9: Build and initialize the Skyflow client. +# Skyflow client is configured with multiple vaults and credentials. +skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) # Add the primary vault configuration. + .add_vault_config(secondary_vault_config) # Add the secondary vault configuration. + .add_vault_config(tertiary_vault_config) # Add the tertiary vault configuration. + .add_skyflow_credentials(skyflow_credentials) # Add JSON-formatted credentials if applicable. + .set_log_level(LogLevel.ERROR) # Set log level for debugging or monitoring purposes. + .build() +) + +# The Skyflow client is now fully initialized. +# Use the `skyflow_client` object to perform secure operations such as: +# - Inserting data +# - Retrieving data +# - Deleting data +# within the configured Skyflow vaults. - response = skyflow_client.vault('VAULT_ID').insert(insert_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) ``` +Notes +- If both Skyflow common credentials and individual credentials at the configuration level are specified, the individual credentials at the configuration level will take precedence. +- If neither Skyflow common credentials nor individual configuration-level credentials are provided, the SDK attempts to retrieve credentials from the SKYFLOW_CREDENTIALS environment variable. +- All Vault operations require a client instance. -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/insert_records.py)** +### Insert data into the vault +To insert data into your vault, use the `insert` method. The `InsertRequest` class creates an insert request, which includes the values to be inserted as a list of records. Below is a simple example to get started. For advanced options, check out [Insert data into the vault](#insert-data-into-the-vault-1) section. ```python from skyflow.error import SkyflowError from skyflow.vault.data import InsertRequest +""" + * This example demonstrates how to insert sensitive data (e.g., card information) into a Skyflow vault using the Skyflow client. + * + * 1. Initializes the Skyflow client. + * 2. Prepares a record with sensitive data (e.g., card number and cardholder name). + * 3. Creates an insert request for inserting the data into the Skyflow vault. + * 4. Prints the response of the insert operation. 
+""" + try: + # Step 1: Initialize data to be inserted into the Skyflow vault insert_data = [ - {'card_number': '4111111111111111'}, + { + 'card_number': '4111111111111111', # Replace with actual card number (sensitive data) + 'cardholder_name': 'John Doe', # Replace with actual cardholder name (sensitive data) + }, ] + # Step 2: Create Insert Request insert_request = InsertRequest( - table_name = 'table1', - values = insert_data, - return_tokens = True # returns tokens + table_name='table1', # Specify the table in the vault where the data will be inserted + values=insert_data, # Attach the data (records) to be inserted + return_tokens=True, # Specify if tokens should be returned upon successful insertion + continue_on_error=True # Optional: Continue on partial errors ) - response = client.vault('').insert(insert_request) - print("Response:", response) -except SkyflowError as e: - print("Error Occurred:", e) + # Step 3: Perform the insert operation using the Skyflow client + insert_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').insert(insert_request) + # Replace the vault ID "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID -``` + # Step 4: Print the response from the insert operation + print('Insert Response: ', insert_response) -Skyflow returns tokens for the record you just inserted. +except SkyflowError as error: + # Step 5: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` +Skyflow returns tokens for the record that was just inserted. ```python InsertResponse( @@ -682,37 +500,131 @@ InsertResponse( [ { 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', - 'card_number': '5479-4229-4622-1393' + 'card_number': '5484-7829-1702-9110', + 'cardholder_name': 'b2308e2a-c1f5-469b-97b7-1f193159399b' } ], errors=[] ) ``` -**Insert call example with `continue_on_error` option** + +## Vault + +The [Vault](https://github.com/skyflowapi/skyflow-python/tree/v2/skyflow/vault) module performs operations on the vault, including inserting records, detokenizing tokens, and retrieving tokens associated with a skyflow_id. + + +### Insert data into the vault + +Apart from using the `insert` method to insert data into your vault covered in [Quickstart](#quickstart), you can also specify options in `InsertRequest`, such as returning tokenized data, upserting records, or continuing the operation in case of errors. + +#### Construct an insert request + +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest + +""" +Example program to demonstrate inserting data into a Skyflow vault, along with corresponding InsertRequest schema. 
+""" + +try: + # Initialize Skyflow client + # Step 1: Prepare the data to be inserted into the Skyflow vault + insert_data = [ + # Create the first record with field names and their respective values + { + '': '', # Replace with actual field name and value + '': '', # Replace with actual field name and value + }, + # Create the second record with field names and their respective values + { + '': '', # Replace with actual field name and value + '': '', # Replace with actual field name and value + } + ] + + # Step 2: Build an InsertRequest object with the table name and the data to insert + insert_request = InsertRequest( + table_name='', # Replace with the actual table name in your Skyflow vault + values=insert_data, # Attach the data to be inserted + ) + + # Step 3: Use the Skyflow client to perform the insert operation + insert_response = skyflow_client.vault('').insert(insert_request) + # Replace with your actual vault ID + + # Print the response from the insert operation + print('Insert Response: ', insert_response) + +# Step 5: Handle any exceptions that occur during the insert operation +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +#### Insert call example with `continue_on_error` option +The `continue_on_error` flag is a boolean that determines whether insert operation should proceed despite encountering partial errors. Set to `True` to allow the process to continue even if some errors occur. ```python from skyflow.error import SkyflowError from skyflow.vault.data import InsertRequest +""" +This example demonstrates how to insert sensitive data (e.g., card information) into a Skyflow vault using the Skyflow client. + +1. Initializes the Skyflow client. +2. Prepares a record with sensitive data (e.g., card number and cardholder name). +3. Creates an insert request for inserting the data into the Skyflow vault. +4. Specifies options to continue on error and return tokens. +5. Prints the response of the insert operation. 
+""" + try: + # Initialize Skyflow client + # Step 1: Initialize a list to hold the data records to be inserted into the vault insert_data = [ - {'card_number': '4111111111111111'}, - {'card_numbe': '4111111111111111'}, # Intentional typo card_numbe + # Step 2: Create the first record with card number and cardholder name + { + 'card_number': '4111111111111111', # Replace with actual card number (sensitive data) + 'cardholder_name': 'John Doe', # Replace with actual cardholder name (sensitive data) + }, + # Step 3: Create the second record with card number and cardholder name + { + 'card_number': '4111111111111111', # Ensure field name matches ("card_number") + 'cardholder_name': 'Jane Doe', # Replace with actual cardholder name (sensitive data) + } ] + # Step 4: Build the InsertRequest object with the data records to insert insert_request = InsertRequest( - table_name = 'table1', - values = insert_data, - return_tokens = True, # returns tokens - continue_on_error = True + table_name='table1', # Specify the table in the vault where the data will be inserted + values=insert_data, # Attach the data (records) to be inserted + return_tokens=True, # Specify if tokens should be returned upon successful insertion + continue_on_error=True # Specify to continue inserting records even if an error occurs for some records ) - response = client.vault('').insert(insert_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 5: Perform the insert operation using the Skyflow client + insert_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').insert(insert_request) + # Replace the vault ID "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID + + # Step 6: Print the response from the insert operation + print('Insert Response: ', insert_response) +except SkyflowError as error: + # Step 7: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` Sample Response @@ -722,9 +634,11 @@ InsertResponse( inserted_fields= [ { - 'skyflow_id': '89c125d1-3bec-4360-b701-a032dda16500', + 'card_number': '5484-7829-1702-9110', 'request_index': 0, - 'card_number': '5479-4229-4622-1393' + 'skyflow_id': '9fac9201-7b8a-4446-93f8-5244e1213bd1', + 'cardholder_name': 'b2308e2a-c1f5-469b-97b7-1f193159399b', + } ], errors= @@ -738,28 +652,57 @@ InsertResponse( ``` -**Insert call example with `upsert` options** +**Insert call example with `upsert` option** +An upsert operation checks for a record based on a unique column's value. If a match exists, the record is updated; otherwise, a new record is inserted. ```python from skyflow.error import SkyflowError from skyflow.vault.data import InsertRequest +""" +This example demonstrates how to insert sensitive data (e.g., card information) into a Skyflow vault using the Skyflow client. + +1. Initializes the Skyflow client. +2. Prepares a record with sensitive data (e.g., card number and cardholder name). +3. Creates an insert request for inserting the data into the Skyflow vault. +4. Specifies the field (cardholder_name) for upsert operations. +5. Prints the response of the insert operation. 
+""" + try: + # Initialize Skyflow client + # Step 1: Initialize a list to hold the data records for the insert/upsert operation insert_data = [ - {"name": 'sample name'}, + # Step 2: Create a record with the field 'cardholder_name' to insert or upsert + { + 'cardholder_name': 'John Doe', # Replace with the actual cardholder name + } ] + # Step 3: Build the InsertRequest object with the upsertData insert_request = InsertRequest( - table_name = 'table1', - values = insert_data, - return_tokens = True, # returns tokens - upsert = "name" # unique column name + table_name='table1', # Specify the table in the vault where the data will be inserted + values=insert_data, # Attach the data (records) to be inserted + return_tokens=True, # Specify if tokens should be returned upon successful insertion + upsert='cardholder_name' # Specify the field to be used for upsert operations (e.g., cardholder_name) ) - response = client.vault('').insert(insert_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 4: Perform the insert/upsert operation using the Skyflow client + insert_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').insert(insert_request) + # Replace the vault ID "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID + + # Step 5: Print the response from the insert/upsert operation + print('Insert Response: ', insert_response) + +except SkyflowError as error: + # Step 6: Handle any exceptions that may occur during the insert/upsert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` Skyflow returns tokens, with `upsert` support, for the record you just inserted. @@ -769,8 +712,8 @@ InsertResponse( inserted_fields= [ { - 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', - 'name': '3f27b3d7-6bf0-432a-acf9-789c0470e2da' + 'skyflow_id': '9fac9201-7b8a-4446-93f8-5244e1213bd1', + 'name': '73ce45ce-20fd-490e-9310-c1d4f603ee83' } ], errors=[] @@ -779,27 +722,43 @@ InsertResponse( ### Detokenize -To retrieve tokens from your vault, you can use the `detokenize` method. The `DetokenizeRequest` class requires a list of detokenization data to be provided as input. Additionally, the redaction type and continue on error are optional parameters. - +To retrieve tokens from your vault, use the `detokenize` method. The `DetokenizeRequest` class requires a list of detokenization data as input. Additionally, you can provide optional parameters, such as the redaction type and the option to continue on error. +#### Construct a detokenize request ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.tokens import DetokenizeRequest - +""" +This example demonstrates how to detokenize sensitive data from tokens stored in a Skyflow vault, along with corresponding DetokenizeRequest schema. 
+""" try: - detokenize_data = ['', '', ''] + # Initialize Skyflow client + # Step 1: Step 1: Initialize a list of tokens to be detokenized (replace with actual tokens) + detokenize_data = ['', '', ''] # Replace with your actual token values + # Step 2: Create the DetokenizeRequest object with the tokens and redaction type detokenize_request = DetokenizeRequest( - tokens =d etokenize_data, - continue_on_error = False, # optional - redaction_type = RedactionType.PLAIN_TEXT # optional + tokens=detokenize_data, # Provide the list of tokens to be detokenized + continue_on_error=True, # Continue even if one token cannot be detokenized + redaction_type=RedactionType.PLAIN_TEXT # Specify how the detokenized data should be returned (plain text) ) - response = skyflow_client.vault('').detokenize(detokenize_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Call the Skyflow vault to detokenize the provided tokens + detokenize_response = skyflow_client.vault('').detokenize(detokenize_request) + # Replace with your actual Skyflow vault ID + + # Step 4: Print the detokenization response, which contains the detokenized data + print('Response:', detokenize_response) +# Step 5: Handle any errors that occur during the detokenization process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the exception for debugging purposes ``` Notes: @@ -807,27 +766,49 @@ Notes: - `redaction_type` defaults to `RedactionType.PLAIN_TEXT`. - `continue_on_error` default valus is `False`. -An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/detokenize_records.py) of a detokenize call: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/detokenize_records.py) of a detokenize call ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.tokens import DetokenizeRequest - +""" +This example demonstrates how to detokenize sensitive data from tokens stored in a Skyflow vault. + +1. Initializes the Skyflow client. +2. Creates a list of tokens (e.g., credit card tokens) that represent the sensitive data. +3. Builds a detokenization request using the provided tokens and specifies how the redacted data should be returned. +4. Calls the Skyflow vault to detokenize the tokens and retrieves the detokenized data. +5. Prints the detokenization response, which contains the detokenized values or errors. 
+""" try: - detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-3840'] + # Initialize Skyflow client + # Step 1: Step 1: Initialize a list of tokens to be detokenized (replace with actual tokens) + tokens = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-3840'] # Replace with your actual token values + # Step 2: Create the DetokenizeRequest object with the tokens and redaction type detokenize_request = DetokenizeRequest( - tokens = detokenize_data, - continue_on_error = False, # optional - redaction_type = RedactionType.PLAIN_TEXT # optional + tokens=tokens, # Provide the list of tokens to be detokenized + continue_on_error=False, # Stop the process if any token cannot be detokenized + redaction_type=RedactionType.PLAIN_TEXT # Specify how the detokenized data should be returned (plain text) ) - response = skyflow_client.vault('').detokenize(detokenize_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Call the Skyflow vault to detokenize the provided tokens + detokenize_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').detokenize(detokenize_request) + # Replace "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID + + # Step 4: Print the detokenization response, which contains the detokenized data + print('Response:', detokenize_response) +# Step 5: Handle any errors that occur during the detokenization process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the exception for debugging purposes ``` Sample response: @@ -843,27 +824,49 @@ DetokenizeResponse( ) ``` -An example of a detokenize call with continue_on_error: +#### An example of a detokenize call with `continue_on_error` option: ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.tokens import DetokenizeRequest - +""" +This example demonstrates how to detokenize sensitive data from tokens stored in a Skyflow vault. + +1. Initializes the Skyflow client. +2. Creates a list of tokens (e.g., credit card tokens) that represent the sensitive data. +3. Builds a detokenization request using the provided tokens and specifies how the redacted data should be returned. +4. Calls the Skyflow vault to detokenize the tokens and retrieves the detokenized data. +5. Prints the detokenization response, which contains the detokenized values or errors. 
+""" try: - detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-384'] + # Initialize Skyflow client + # Step 1: Step 1: Initialize a list of tokens to be detokenized (replace with actual tokens) + tokens = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-3840'] # Replace with your actual token values + # Step 2: Create the DetokenizeRequest object with the tokens and redaction type detokenize_request = DetokenizeRequest( - tokens = detokenize_data, - continue_on_error = True, # optional - redaction_type = RedactionType.PLAIN_TEXT # optional + tokens=tokens, # Provide the list of tokens to be detokenized + continue_on_error=True, # Continue even if some tokens cannot be detokenized + redaction_type=RedactionType.PLAIN_TEXT # Specify how the detokenized data should be returned (plain text) ) - response = skyflow_client.vault('').detokenize(detokenize_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Call the Skyflow vault to detokenize the provided tokens + detokenize_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').detokenize(detokenize_request) + # Replace "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID + + # Step 4: Print the detokenization response, which contains the detokenized data + print('Response:', detokenize_response) +# Step 5: Handle any errors that occur during the detokenization process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the exception for debugging purposes ``` Sample response: @@ -894,39 +897,98 @@ DetokenizeResponse( ### Tokenize -To tokenize data, use the `tokenize` method. The `TokenizeRequest` class is utilized to create a tokenize request. In this request, you specify the `values` parameter, which is a list of dictionaries. Each dictionary contains two keys: `value` and `column_group`. +Tokenization replaces sensitive data with unique identifier tokens. This approach protects sensitive information by securely storing the original data while allowing the use of tokens within your application. + +To tokenize data, use the `tokenize` method. The `TokenizeRequest` class creates a tokenize request. In this request, you specify the values parameter, which is a list of column values objects. Each column value contains two properties: `value` and `column_group`. + +#### Construct a tokenize request ```python +from skyflow.error import SkyflowError from skyflow.vault.tokens import TokenizeRequest -tokenize_request = TokenizeRequest( - values = [{ - 'value': '', - 'column_group': '' - }] -) -``` +""" +This example demonstrates how to tokenize sensitive data (e.g., credit card information) using the Skyflow client, along with corresponding TokenizeRequest schema. 
+""" +try: + # Initialize Skyflow client + # Step 1: Initialize a list of column values to be tokenized (replace with actual sensitive data) + column_values = [ + # Step 2: Create column values for each sensitive data field (e.g., card number and cardholder name) + {"value": "", "column_group": ""}, # Replace and with actual data + {"value": "", "column_group": ""} # Replace and with actual data + ] + + # Step 3: Build the TokenizeRequest with the column values + tokenize_request = TokenizeRequest( + values=column_values + ) -Sample usage + # Step 4: Call the Skyflow vault to tokenize the sensitive data + tokenize_response = skyflow_client.vault('').tokenize(tokenize_request) + # Replace with your actual Skyflow vault ID + + # Step 5: Print the tokenization response, which contains the generated tokens or errors + print(tokenize_response) + +# Step 6: Handle any errors that occur during the tokenization process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/tokenize_records.py) of a tokenize call: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/tokenize_records.py) of Tokenize call ```python from skyflow.error import SkyflowError from skyflow.vault.tokens import TokenizeRequest +""" +This example demonstrates how to tokenize sensitive data (e.g., credit card information) using the Skyflow client. + +1. Initializes the Skyflow client. +2. Creates a column value for sensitive data (e.g., credit card number). +3. Builds a tokenize request with the column value to be tokenized. +4. Sends the request to the Skyflow vault for tokenization. +5. Prints the tokenization response, which includes the token or errors. 
+""" try: + # Initialize Skyflow client + # Step 1: Initialize a list of column values to be tokenized (replace with actual sensitive data) + column_values = [ + # Step 2: Create column values for each sensitive data field (e.g., card number and cardholder name) + {"value": "4111111111111111", "column_group": "card_number_cg"}, # Replace and with actual data + ] + + # Step 3: Build the TokenizeRequest with the column values tokenize_request = TokenizeRequest( - values = [{ - "value": '4111111111111111', - "column_group": "card_number_cg" - }] + values=column_values ) - response = client.vault('').tokenize(tokenize_request) - print(response) -except SyntaxError as e: - print('Error Occurred: ', e) + # Step 4: Call the Skyflow vault to tokenize the sensitive data + tokenize_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').tokenize(tokenize_request) + # Replace "9f27764a10f7946fe56b3258e117" with your actual Skyflow vault ID + + # Step 5: Print the tokenization response, which contains the generated tokens or errors + print(tokenize_response) + +# Step 6: Handle any errors that occur during the tokenization process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes + ``` Sample response: @@ -944,58 +1006,131 @@ TokenizeResponse( ### Get -To retrieve data using Skyflow IDs or unique column values, use the `get` method. The `GetRequest` class is used to create a get request, where you specify parameters such as the table name, redaction type, Skyflow IDs, column names, column values, and return tokens. If Skyflow IDs are provided, column names and column values cannot be used. Similarly, if column names or column values are provided, Skyflow IDs cannot be used. +To retrieve data using Skyflow IDs or unique column values, use the get method. The `GetRequest` class creates a get request, where you specify parameters such as the table name, redaction type, Skyflow IDs, column names, column values, and whether to return tokens. If you specify Skyflow IDs, you can't use column names and column values, and the inverse is true—if you specify column names and column values, you can't use Skyflow IDs. + +#### Construct a get request ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.data import GetRequest -GetRequest( - table = '', - ids = ['SKYFLOW_ID1>', 'SKYFLOW_ID2>'], - return_tokens = True, - redaction_type = RedactionType.PLAIN_TEXT -) +""" +This example demonstrates how to retrieve data from the Skyflow vault using different methods, along with corresponding GetRequest schema. 
+""" +try: + # Initialize Skyflow client + # Step 1: Initialize a list of Skyflow IDs to retrieve records (replace with actual Skyflow IDs) + ids = ['', ''] # Replace with actual Skyflow IDs + + # Step 2: Create a GetRequest to retrieve records by Skyflow ID without returning tokens + get_by_id_request = GetRequest( + table='', # Replace with your actual table name + ids=ids, + return_tokens=False, # Set to false to avoid returning tokens + redaction_type=RedactionType.PLAIN_TEXT # Redact data as plain text + ) -# or + # Send the request to the Skyflow vault and retrieve the records + get_by_id_response = skyflow_client.vault('').get(get_by_id_request) + # Replace with your actual Skyflow vault ID + + print(get_by_id_response) + + # Step 3: Create another GetRequest to retrieve records by Skyflow ID with tokenized values + get_tokens_request = GetRequest( + table='', # Replace with your actual table name + ids=ids, + return_tokens=True # Set to True to return tokenized values + ) + + # Send the request to the Skyflow vault and retrieve the tokenized records + get_tokens_response = skyflow_client.vault('').get(get_tokens_request) + print(get_tokens_response) + + column_values = [ + '', # Replace with the actual column value + '' # Replace with the actual column value + ] + + # Step 4: Create a GetRequest to retrieve records based on specific column values + get_by_column_request = GetRequest( + table='', # Replace with the actual table name + column_name='', # Replace with the column name + column_values=column_values, # Add the list of column values to filter by + redaction_type=RedactionType.PLAIN_TEXT # Redact data as plain text + ) + + # Send the request to the Skyflow vault and retrieve the filtered records + get_by_column_response = skyflow_client.vault('').get(get_by_column_request) + print(get_by_column_response) +# Step 5: Handle any errors that occur during the retrieval process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes -GetRequest( - table = '', - column_name ='', - column_values = ['COLUMN_VALUE1>', 'COLUMN_VALUE2>'], - redaction_type = RedactionType.PLAIN_TEXT -) ``` -Sample usage +#### Get by skyflow IDs +Retrieve specific records using skyflow `ids`. Ideal for fetching exact records when IDs are known. -### Get By Column Name and Column Values -The following snippet shows how to use the `get` method using column names and column values. For details, see [get_column_values.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_column_values.py), +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/get_records.py) of a get call to retrieve data using Redaction type: ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.data import GetRequest -try: - column_values = [ - '123456' - ] +""" +This example demonstrates how to retrieve data from the Skyflow vault using a list of Skyflow IDs. - get_request = GetRequest( - table = 'table1', - column_name = 'card_number', # It must be configured as unique in the schema. - column_values = column_values, - redaction_type = RedactionType.PLAIN_TEXT +1. Initializes the Skyflow client with a given vault ID. +2. Creates a request to retrieve records based on Skyflow IDs. +3. 
Specifies that the response should not return tokens. +4. Uses plain text redaction type for the retrieved records. +5. Prints the response to display the retrieved records. +""" +try: + # Initialize Skyflow client + # Step 1: Initialize a list of Skyflow IDs to retrieve records (replace with actual Skyflow IDs) + ids = ['a581d205-1969-4350-acbe-a2a13eb871a6', '5ff887c3-b334-4294-9acc-70e78ae5164a'] # Replace with actual Skyflow IDs + + # Step 2: Create a GetRequest to retrieve records by Skyflow ID without returning tokens + # The request specifies: + # - `ids`: The list of Skyflow IDs to retrieve + # - `table`: The table from which the records will be retrieved + # - `return_tokens`: Set to false, meaning tokens will not be returned in the response + # - `redaction_type`: Set to PLAIN_TEXT, meaning the retrieved records will have data redacted as plain text + get_by_id_request = GetRequest( + table='table1', # Replace with the actual table name + ids=ids, + return_tokens=False, # Set to false to avoid returning tokens + redaction_type=RedactionType.PLAIN_TEXT # Redact data as plain text ) - response = skyflow_client.vault('').get(get_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Send the request to the Skyflow vault and retrieve the records + get_by_id_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').get(get_by_id_request) + # Replace with actual Vault ID + print(get_by_id_response) # Print the response to the console + +# Step 5: Handle any errors that occur during the retrieval process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` Sample response: @@ -1004,57 +1139,72 @@ Sample response: GetResponse( data=[ { - 'card_number': '123456', - 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968' + 'card_number': '4555555555555553', + 'email': 'john.doe@gmail.com', + 'name': 'john doe', + 'skyflow_id': 'a581d205-1969-4350-acbe-a2a13eb871a6' + }, + { + 'card_number': '4555555555555559', + 'email': 'jane.doe@gmail.com', + 'name': 'jane doe', + 'skyflow_id': '5ff887c3-b334-4294-9acc-70e78ae5164a' } ], errors=[] ) - -``` - -### Get By Skyflow Ids - -```python -from skyflow.error import SkyflowError -from skyflow.utils.enums import RedactionType -from skyflow.vault.data import GetRequest - -GetRequest( - table = '', - ids = ['SKYFLOW_ID1>', 'SKYFLOW_ID2>'], - return_tokens = True, - redaction_type = RedactionType.PLAIN_TEXT -) ``` -#### Redaction Types - -There are 4 accepted values in Skyflow.RedactionTypes: +#### Get tokens +Return tokens for records. Ideal for securely processing sensitive data while maintaining data privacy. -- `PLAIN_TEXT` -- `MASKED` -- `REDACTED` -- `DEFAULT` +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/get_records.py) of get call to retrieve tokens using Skyflow IDs: -An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_records.py) of get by skyflow ids call: ```python from skyflow.error import SkyflowError from skyflow.utils.enums import RedactionType from skyflow.vault.data import GetRequest +""" +This example demonstrates how to retrieve data from the Skyflow vault and return tokens along with the records. + +1. Initializes the Skyflow client with a given vault ID. +2. 
Creates a request to retrieve records based on Skyflow IDs and ensures tokens are returned. +3. Prints the response to display the retrieved records along with the tokens. +""" try: - get_request = GetRequest( - table = 'table1', - ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], - redaction_type = RedactionType.PLAIN_TEXT + # Initialize Skyflow client + # Step 1: Initialize a list of Skyflow IDs (replace with actual Skyflow IDs) + ids = ['a581d205-1969-4350-acbe-a2a13eb871a6', '5ff887c3-b334-4294-9acc-70e78ae5164a'] # Replace with actual Skyflow IDs + + # Step 2: Create a GetRequest to retrieve records based on Skyflow IDs + # The request specifies: + # - `ids`: The list of Skyflow IDs to retrieve + # - `table`: The table from which the records will be retrieved + # - `return_tokens`: Set to false, meaning tokens will not be returned in the response + get_tokens_request = GetRequest( + table='table1', # Replace with the actual table name + ids=ids, + return_tokens=True, # Set to false to avoid returning tokens ) - response = client.vault('').get(get_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Send the request to the Skyflow vault and retrieve the records + get_tokens_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').get(get_tokens_request) + # Replace with actual Vault ID + + print(get_tokens_response) # Print the response to the console + +# Step 5: Handle any errors that occur during the retrieval process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` Sample response: @@ -1063,36 +1213,75 @@ Sample response: GetResponse( data=[ { - 'card_number': '4555555555555553', - 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + 'card_number': '3998-2139-0328-0697', + 'email': 'c9a6c9555060@82c092e7.bd52', + 'name': '82c092e7-74c0-4e60-bd52-c9a6c9555060', + 'skyflow_id': 'a581d205-1969-4350-acbe-a2a13eb871a6' }, { - 'card_number': '4555555555555559', - 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + 'card_number': '3562-0140-8820-7499', + 'email': '6174366e2bc6@59f82e89.93fc', + 'name': '59f82e89-138e-4f9b-93fc-6174366e2bc6', + 'skyflow_id': '5ff887c3-b334-4294-9acc-70e78ae5164a' } ], errors=[] ) - ``` -The following snippet shows how to use the `get()` method with return_tokens true. +#### Get by column name and column values +Retrieve records by unique column values. Ideal for querying data without knowing Skyflow IDs, using alternate unique identifiers. + +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/get_column_values.py) of get call to retrieve data using column name and column values: ```python from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType from skyflow.vault.data import GetRequest +""" +This example demonstrates how to retrieve data from the Skyflow vault based on column values. + +1. Initializes the Skyflow client with a given vault ID. +2. Creates a request to retrieve records based on specific column values (e.g., email addresses). +3. Prints the response to display the retrieved records after redacting sensitive data based on the specified redaction type. 
+""" try: - get_request = GetRequest( - table = 'table1', - ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], - return_tokens = True + # Initialize Skyflow client + # Step 1: Initialize a list of column values (email addresses in this case) + column_values = [ + 'john.doe@gmail.com', # Example email address + 'jane.doe@gmail.com' # Example email address + ] + + # Step 2: Step 2: Create a GetRequest to retrieve records based on column values + # The request specifies: + # - `ids`: The list of Skyflow IDs to retrieve + # - `table`: The table from which the records will be retrieved + # - `return_tokens`: Set to false, meaning tokens will not be returned in the response + get_by_column_request = GetRequest( + table='table1', # Replace with the actual table name + column_name='email', # The column name to filter by (e.g., "email") + column_values=column_values, # The list of column values to match + redaction_type=RedactionType.PLAIN_TEXT # Set the redaction type (e.g., PLAIN_TEXT) ) - response = client.vault('').get(get_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 3: Send the request to the Skyflow vault and retrieve the records + get_by_column_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').get(get_by_column_request) + # Replace with actual Vault ID + + print(get_by_column_response) # Print the response to the console + +# Step 5: Handle any errors that occur during the retrieval process +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` @@ -1102,76 +1291,164 @@ Sample response: GetResponse( data=[ { - 'card_number': '3562-0140-8820-7499', - 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + 'card_number': '4555555555555553', + 'email': 'john.doe@gmail.com', + 'name': 'john doe', + 'skyflow_id': 'a581d205-1969-4350-acbe-a2a13eb871a6' }, { - 'card_number': '3998-2139-0328-0697', - 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + 'card_number': '4555555555555559', + 'email': 'jane.doe@gmail.com', + 'name': 'jane doe', + 'skyflow_id': '5ff887c3-b334-4294-9acc-70e78ae5164a' } ], errors=[] ) ``` +#### Redaction Types +Redaction types determine how sensitive data is displayed when retrieved from the vault. + +**Available Redaction Types** + +- `DEFAULT`: Applies the vault-configured default redaction setting. +- `DEFAULT`: Completely removes sensitive data from view. +- `MASKED`: Partially obscures sensitive information. +- `PLAIN_TEXT`: Displays the full, unmasked data. + +**Choosing the Right Redaction Type** +- Use `REDACTED` for scenarios requiring maximum data protection to prevent exposure of sensitive information. +- Use `MASKED` to provide partial visibility of sensitive data for less critical use cases. +- Use `PLAIN_TEXT` for internal, authorized access where full data visibility is necessary. + ### Update To update data in your vault, use the `update` method. The `UpdateRequest` class is used to create an update request, where you specify parameters such as the table name, data (as a dictionary), tokens, return_tokens, and token_strict. If `return_tokens` is set to True, Skyflow returns tokens for the updated records. If `return_tokens` is set to False, Skyflow returns IDs for the updated records. 
+#### Construct an update request + ```python from skyflow.error import SkyflowError +from skyflow.utils.enums import TokenMode from skyflow.vault.data import UpdateRequest +""" +This example demonstrates how to update records in the Skyflow vault by providing new data and/or tokenized values, along with the corresponding UpdateRequest schema. +""" + try: + # Initialize Skyflow client + # Step 1: Prepare the data to update in the vault + # Use a dictionary to store the data that will be updated in the specified table update_data = { - 'skyflow_id': '', - '': '' + 'skyflow_id': '', # Skyflow ID for identifying the record to update + '': '', # Example of a column name and its value to update + '': '' # Another example of a column name and its value to update + } + + # Step 2: Prepare the tokens (if necessary) for certain columns that require tokenization + # Use a dictionary to specify columns that need tokens in the update request + tokens = { + '': '' # Example of a column name that should be tokenized } + # Step 3: Create an UpdateRequest to specify the update operation + # The request includes the table name, data, tokens, and the returnTokens flag update_request = UpdateRequest( - table='TABLE_NAME', - data=update_data + table='', # Replace with the actual table name to update + token_mode=TokenMode.ENABLE, # Specifies the tokenization mode (ENABLE means tokenization is applied) + data=update_data, # The data to update in the record + tokens=tokens, # The tokens associated with specific columns + return_tokens=True # Specify whether to return tokens in the response ) - response = skyflow_client.vault('VAULT_ID').update(update_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) -``` + # Step 4: Send the request to the Skyflow vault and update the record + update_response = skyflow_client.vault('').update(update_request) # Replace with actual Vault ID + + # Step 5: Print the response to confirm the update result + print(update_response) + +except SkyflowError as error: + # Step 6: Handle any errors that occur during the update operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) -Sample usage +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` -The following snippet shows how to use the `update()` method. For details, see [update_record.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/update_record.py), +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/update_record.py) of update call ```python from skyflow.error import SkyflowError +from skyflow.utils.enums import TokenMode from skyflow.vault.data import UpdateRequest +""" +This example demonstrates how to update a record in the Skyflow vault with specified data and tokens. + +1. Initializes the Skyflow client with a given vault ID. +2. Constructs an update request with data to modify and tokens to include. +3. Sends the request to update the record in the vault. +4. Prints the response to confirm the success or failure of the update operation. 
+""" + try: + # Initialize Skyflow client + # Step 1: Prepare the data to update in the vault + # Use a dictionary to store the data that will be updated in the specified table update_data = { - 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', - 'card_number': '4111111111117777' + 'skyflow_id': '5b699e2c-4301-4f9f-bcff-0a8fd3057413', # Skyflow ID for identifying the record to update + 'name': 'john doe', # Example of a column name and its value to update + 'card_number': '4111111111111115' # Another example of a column name and its value to update } + # Step 2: Prepare the tokens (if necessary) for certain columns that require tokenization + # Use a dictionary to specify columns that need tokens in the update request + tokens = { + 'name': '72b8ffe3-c8d3-4b4f-8052-38b2a7405b5a' # Example of a column name that should be tokenized + } + + # Step 3: Create an UpdateRequest to specify the update operation + # The request includes the table name, data, tokens, and the returnTokens flag update_request = UpdateRequest( - table = 'table1', - data = update_data + table='table1', # Replace with the actual table name to update + token_mode=TokenMode.ENABLE, # Token mode enabled to allow tokenization of sensitive data + data=update_data, # The data to update in the record + tokens=tokens, # The tokenized values for sensitive columns ) - response = skyflow_client.vault('').update(update_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) + # Step 4: Send the request to the Skyflow vault and update the record + update_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').update(update_request) # Replace with actual Vault ID + + # Step 5: Print the response to confirm the update result + print(update_response) + +except SkyflowError as error: + # Step 6: Handle any errors that occur during the update operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` Sample response -`return_tokens` set to `True` +- When `return_tokens` is set to `True` ```python UpdateResponse( updated_field={ - 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', + 'skyflow_id': '5b699e2c-4301-4f9f-bcff-0a8fd3057413', + 'name': '72b8ffe3-c8d3-4b4f-8052-38b2a7405b5a', 'card_number': '4131-1751-0217-8491' }, errors=[] @@ -1179,11 +1456,11 @@ UpdateResponse( ``` -`return_tokens` set to `False` +- When `return_tokens` is set to `False` ```python UpdateResponse( - updated_field={'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315'}, + updated_field={'skyflow_id': '5b699e2c-4301-4f9f-bcff-0a8fd3057413'}, errors=[] ) @@ -1193,44 +1470,84 @@ UpdateResponse( To delete records using Skyflow IDs, use the `delete` method. The `DeleteRequest` class accepts a list of Skyflow IDs that you want to delete, as shown below: +#### Construct a delete request + ```python from skyflow.error import SkyflowError from skyflow.vault.data import DeleteRequest -primary_delete_ids = [ - 'SKYFLOW_ID1', - 'SKYFLOW_ID2', - 'SKYFLOW_ID3', -] +""" +This example demonstrates how to delete records from a Skyflow vault using specified Skyflow IDs, along with corresponding DeleteRequest schema. 
+""" -delete_request = DeleteRequest( - table = '', - ids = primary_delete_ids -) +try: + # Initialize Skyflow client + # Step 1: Prepare a list of Skyflow IDs for the records to delete + # The list stores the Skyflow IDs of the records that need to be deleted from the vault + delete_ids = ['', '', ''] # Replace with actual Skyflow IDs + + # Step 2: Create a DeleteRequest to define the delete operation + # The request specifies the table from which to delete the records and the IDs of the records to delete + delete_request = DeleteRequest( + table='', # Replace with the actual table name from which to delete + ids=delete_ids # List of Skyflow IDs to delete + ) + + # Step 3: Send the delete request to the Skyflow vault + delete_response = skyflow_client.vault('').delete(delete_request) # Replace with your actual Vault ID + print(delete_response) # Print the response to confirm the delete result + +except SkyflowError as error: + # Step 4: Handle any exceptions that occur during the delete operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the exception stack trace for debugging purposes ``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/delete_records.py) of delete call: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/delete_records.py) of delete call ```python from skyflow.error import SkyflowError from skyflow.vault.data import DeleteRequest +""" +This example demonstrates how to delete records from a Skyflow vault using specified Skyflow IDs. + +1. Initializes the Skyflow client with a given Vault ID. +2. Constructs a delete request by specifying the IDs of the records to delete. +3. Sends the delete request to the Skyflow vault to delete the specified records. +4. Prints the response to confirm the success or failure of the delete operation. 
+""" + try: - delete_ids = [ - '77e093f8-3ace-4295-8683-bb6745d6178e', - 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' - ] + # Initialize Skyflow client + # Step 1: Prepare a list of Skyflow IDs for the records to delete + # The list stores the Skyflow IDs of the records that need to be deleted from the vault + delete_ids = ['9cbf66df-6357-48f3-b77b-0f1acbb69280', 'ea74bef4-f27e-46fe-b6a0-a28e91b4477b', '47700796-6d3b-4b54-9153-3973e281cafb'] # Replace with actual Skyflow IDs + # Step 2: Create a DeleteRequest to define the delete operation + # The request specifies the table from which to delete the records and the IDs of the records to delete delete_request = DeleteRequest( - table='table1', - ids=delete_ids + table='table1', # Replace with the actual table name from which to delete + ids=delete_ids # List of Skyflow IDs to delete ) - response = client.vault('').delete(delete_request) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) - + # Step 3: Send the delete request to the Skyflow vault + delete_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').delete(delete_request) # Replace with your actual Vault ID + print(delete_response) # Print the response to confirm the delete result +# Step 4: Handle any exceptions that occur during the delete operation +except SkyflowError as error: + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the exception stack trace for debugging purposes ``` Sample response: @@ -1238,46 +1555,199 @@ Sample response: ```python DeleteResponse( deleted_ids=[ - '77e093f8-3ace-4295-8683-bb6745d6178e', - 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' + '9cbf66df-6357-48f3-b77b-0f1acbb69280', + 'ea74bef4-f27e-46fe-b6a0-a28e91b4477b', + '47700796-6d3b-4b54-9153-3973e281cafb' ], errors=[] ) ``` -### Invoke Connection +### Query + +To retrieve data with SQL queries, use the `query` method. `QueryRequest` is class that takes the `query` parameter as follows: -Using Skyflow Connection, end-user applications can integrate checkout/card issuance flow with their apps/systems. To invoke connection, use the `invoke` method of the Skyflow client. +#### Construct a query request +Refer to [Query your data](https://docs.skyflow.com/query-data/) and [Execute Query](https://docs.skyflow.com/record/#QueryService_ExecuteQuery) for guidelines and restrictions on supported SQL statements, operators, and keywords. ```python from skyflow.error import SkyflowError -from skyflow.vault.connection import InvokeConnectionRequest +from skyflow.vault.data import QueryRequest -body = { - 'KEY1': 'VALUE1', - 'KEY2': 'VALUE2' -} -headers = { - 'KEY1': 'VALUE1' -} -path_params = { - 'KEY1': 'VALUE1' -} -query_params = { - 'KEY1': 'VALUE1' -} +""" +This example demonstrates how to execute a custom SQL query on a Skyflow vault, along with QueryRequest schema. 
+""" + +try: + # Initialize Skyflow client + # Step 1: Define the SQL query to execute on the Skyflow vault + # Replace "" with the actual SQL query you want to run + query = '' # Example: "SELECT * FROM table1 WHERE column1 = 'value'" + + # Step 2: Create a QueryRequest with the specified SQL query + query_request = QueryRequest( + query=query # SQL query to execute + ) + + # Step 3: Execute the query request on the specified Skyflow vault + query_response = skyflow_client.vault('').query(query_request) # Replace with your actual Vault ID + + # Step 4: Print the response containing the query results + print('Query Result:', query_response) + +except SkyflowError as error: + # Step 5: Handle any exceptions that occur during the query execution + print('Skyflow Specific Error:', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + # Handle any unexpected errors during execution + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/query_records.py) of query call -invoke_connection_request = InvokeConnectionRequest( - method = Method.POST, - body = body, - headers = headers, # optional - path_params = path_params, # optional - query_params = query_params # optional +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import QueryRequest + +""" +This example demonstrates how to execute a SQL query on a Skyflow vault to retrieve data. + +1. Initializes the Skyflow client with the Vault ID. +2. Constructs a query request with a specified SQL query. +3. Executes the query against the Skyflow vault. +4. Prints the response from the query execution. +""" + +try: + # Initialize Skyflow client + # Step 1: Define the SQL query + # Example query: Retrieve all records from the "cards" table with a specific skyflow_id + query = "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" # Example: "SELECT * FROM table1 WHERE column1 = 'value'" + + # Step 2: Create a QueryRequest with the SQL query + query_request = QueryRequest( + query=query # SQL query to execute + ) + + # Step 3: Execute the query request on the specified Skyflow vault + query_response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').query(query_request) # Vault ID: 9f27764a10f7946fe56b3258e117 + + # Step 4: Print the response containing the query results + print(query_response) + +except SkyflowError as error: + # Step 5: Handle any exceptions that occur during the query execution + print('Skyflow Specific Error:', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +Sample Response + +```python +QueryResponse( + fields=[ + { + 'card_number': 'XXXXXXXXXXXX1112', + 'name': 'S***ar', + 'skyflow_id': '3ea3861-x107-40w8-la98-106sp08ea83f', + 'tokenized_data': {} + } + ], + errors=[] ) ``` -`methodName` supports the following methods: +### Connections + +Skyflow Connections is a gateway service that uses tokenization to securely send and receive data between your systems and first- or third-party services. The [connections](https://github.com/skyflowapi/skyflow-python/tree/v2/skyflow/vault/connection) module invokes both inbound and/or outbound connections. 
+- **Inbound connections**: Act as intermediaries between your client and server, tokenizing sensitive data before it reaches your backend, ensuring downstream services handle only tokenized data. +- **Outbound connections**: Enable secure extraction of data from the vault and transfer it to third-party services via your backend server, such as processing checkout or card issuance flows. + +#### Invoke a connection +To invoke a connection, use the `invoke` method of the Skyflow client. +#### Construct an invoke connection request + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RequestMethod +from skyflow.vault.connection import InvokeConnectionRequest + +""" +This example demonstrates how to invoke an external connection using the Skyflow SDK, along with corresponding InvokeConnectionRequest schema. +""" + +try: + # Initialize Skyflow client + # Step 1: Define the request body parameters + # These are the values you want to send in the request body + request_body = { + '': '', + '': '' + } + + # Step 2: Define the request headers + # Add any required headers that need to be sent with the request + request_headers = { + '': '', + '': '', + } + + # Step 3: Define the path parameters + # Path parameters are part of the URL and typically used in RESTful APIs + path_params = { + '': '', + '': '' + } + + # Step 4: Define the query parameters + # Query parameters are included in the URL after a '?' and are used to filter or modify the response + query_params = { + '': '', + '': '', + } + + # Step 5: Build the InvokeConnectionRequest using the provided parameters + invoke_connection_request = InvokeConnectionRequest( + method=RequestMethod.POST, # The HTTP method to use for the request (POST in this case) + body=request_body, # The body of the request + headers=request_headers, # The headers to include in the request + path_params=path_params, # The path parameters for the URL + query_params=query_params # The query parameters to append to the URL + ) + + # Step 6: Invoke the connection using the request + # Replace '' with the actual connection ID you are using + response = skyflow_client.connection('').invoke(invoke_connection_request) + + # Step 7: Print the response from the invoked connection + # This response contains the result of the request sent to the external system + print('Connection invocation successful: ', response) + +except SkyflowError as error: + # Step 8: Handle any exceptions that occur during the connection invocation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + # Print the exception stack trace for debugging + print('Unexpected Error:', error) + +``` + +`method` supports the following methods: - GET - POST @@ -1285,47 +1755,86 @@ invoke_connection_request = InvokeConnectionRequest( - PATCH - DELETE -**path_params, query_params, request_header, request_body** are the JSON objects represented as dictionaries that will be sent through the connection integration url. +**path_params, query_params, header, body** are the JSON objects represented as dictionaries that will be sent through the connection integration url. 
-An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/invoke_connection.py) of invoke_connection: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/invoke_connection.py) of Invoke Connection ```python -from skyflow import Skyflow -from skyflow import LogLevel -from skyflow.utils.enums import Method +from skyflow import Skyflow, LogLevel from skyflow.error import SkyflowError +from skyflow.utils.enums import RequestMethod from skyflow.vault.connection import InvokeConnectionRequest -credentials = { - 'path': '/path/to/credentials.json', -} +""" +This example demonstrates how to invoke an external connection using the Skyflow SDK. +It configures a connection, sets up the request, and sends a POST request to the external service. -client = ( - Skyflow.builder() - .add_connection_config({ - 'connection_id': '', - 'connection_url': '', - 'credentials': credentials - }) - .set_log_level(LogLevel.OFF) - .build() -) +1. Initialize Skyflow client with connection details. +2. Define the request body, headers, and method. +3. Execute the connection request. +4. Print the response from the invoked connection. +""" -invoke_connection_request = InvokeConnectionRequest( - method=Method.POST, - body={ - 'card_number': '4337-1696-5866-0865', - 'ssn': '524-41-4248' - }, - headers = { - 'Content-Type': 'application/json' +try: + # Initialize Skyflow client + # Step 1: Set up credentials and connection configuration + # Load credentials from a JSON file (you need to provide the correct path) + credentials = { + 'path': '/path/to/credentials.json' } -) -response = client.connection('').invoke(invoke_connection_request) + # Define the connection configuration (URL and credentials) + connection_config = { + 'connection_id': '', # Replace with actual connection ID + 'connection_url': 'https://connection.url.com', # Replace with actual connection URL + 'credentials': credentials # Set credentials for the connection + } -print(response) + # Initialize the Skyflow client with the connection configuration + skyflow_client = ( + Skyflow.builder() + .add_connection_config(connection_config) # Add connection configuration to client + .set_log_level(LogLevel.DEBUG) # Set log level to DEBUG for detailed logs + .build() # Build the Skyflow client instance + ) + # Step 2: Define the request body and headers + request_body = { + 'card_number': '4337-1696-5866-0865', # Example card number + 'ssn': '524-41-4248' # Example SSN + } + + # Add any required headers that need to be sent with the request + request_headers = { + 'Content-Type': 'application/json', # Set content type for the request + } + + # Step 3: Build the InvokeConnectionRequest with required parameters + # Set HTTP method to POST, include the request body and headers + invoke_connection_request = InvokeConnectionRequest( + method=RequestMethod.POST, # The HTTP method to use for the request (POST in this case) + body=request_body, # The body of the request + headers=request_headers, # The headers to include in the request + ) + + # Step 4: Invoke the connection using the request + # Replace '' with the actual connection ID you are using + response = skyflow_client.connection('').invoke(invoke_connection_request) + + # Step 5: Print the response from the invoked connection + # This response contains the result of the request sent to the external system + print('Connection invocation successful: ', response) + +except SkyflowError as error: + # Step 6: Handle any exceptions that occur 
during the connection invocation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + # Print the exception stack trace for debugging + print('Unexpected Error:', error) ``` Sample response: @@ -1335,126 +1844,376 @@ ConnectionResponse( { 'card_number': '4337-1696-5866-0865', 'ssn': '524-41-4248', - 'request_id': '84796a11-0b7d-4cb0-a348-cf9fefb5886f,84796a11-0b7d-4cb0-a348-cf9fefb5886f' + 'request_id': '4a3453b5-7aa4-4373-98d7-cf102b1f6f97' } ) ``` -### Query +### Authenticate with bearer tokens +This section covers methods for generating and managing tokens to authenticate API calls: -To retrieve data with SQL queries, use the `query` method. `QueryRequest` is class that takes the `query` parameter as follows: +- **Generate a bearer token:** +Enable the creation of bearer tokens using service account credentials. These tokens, valid for 60 minutes, provide secure access to Vault services and management APIs based on the service account's permissions. Use this for general API calls when you only need basic authentication without additional context or role-based restrictions. +- **Generate a bearer token with context:** +Support embedding context values into bearer tokens, enabling dynamic access control and the ability to track end-user identity. These tokens include context claims and allow flexible authorization for Vault services. Use this when policies depend on specific contextual attributes or when tracking end-user identity is required. +- **Generate a scoped bearer token:** +Facilitate the creation of bearer tokens with role-specific access, ensuring permissions are limited to the operations allowed by the designated role. This is particularly useful for service accounts with multiple roles. Use this to enforce fine-grained role-based access control, ensuring tokens only grant permissions for a specific role. +- **Generate signed data tokens:** +Add an extra layer of security by digitally signing data tokens with the service account's private key. These signed tokens can be securely detokenized, provided the necessary bearer token and permissions are available. Use this to add cryptographic protection to sensitive data, enabling secure detokenization with verified integrity and authenticity. -```python -from skyflow.vault.data import QueryRequest +#### Generate a bearer token +The [Service Account](https://github.com/skyflowapi/skyflow-python/tree/v2/skyflow/service_account) Python package generates service account tokens using a service account credentials file, which is provided when a service account is created. The tokens generated by this module are valid for 60 minutes and can be used to make API calls to the [Data](https://docs.skyflow.com/record/) and [Management](https://docs.skyflow.com/management/) APIs, depending on the permissions assigned to the service account. -query_request = QueryRequest( - query= '' -) -``` - -See [Query your data](https://docs.skyflow.com/query-data/) and [Execute Query](https://docs.skyflow.com/record/#QueryService_ExecuteQuery) for guidelines and restrictions on supported SQL statements, operators, and keywords. 
+The `generate_bearer_token(filepath)` function takes the credentials file path for token generation. Alternatively, you can send the entire credentials as a string by using `generate_bearer_token_from_creds(credentials)`.
 
-An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/query_records.py) of Query call:
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/token_generation_example.py):
 
 ```python
+import json
 from skyflow.error import SkyflowError
-from skyflow.vault.data import QueryRequest
-
-query_request = QueryRequest(
-    query = "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'"
+from skyflow.service_account import (
+    generate_bearer_token,
+    generate_bearer_token_from_creds,
+    is_expired,
 )
 
+# Example program to generate a Bearer Token using Skyflow's service account utilities.
+# The token can be generated in two ways:
+# 1. Using the file path to a credentials.json file.
+# 2. Using the JSON content of the credentials file as a string.
+
+# Variable to store the generated token
+bearer_token = ''
+
+# Example 1: Generate Bearer Token using a credentials.json file
+try:
+    # Specify the full file path to the credentials.json file
+    file_path = 'CREDENTIALS_FILE_PATH'
+
+    # Check if the token is already generated and still valid
+    if not is_expired(bearer_token):
+        print("Generated Bearer Token (from file):", bearer_token)
+    else:
+        # Generate a new Bearer Token from the credentials file
+        token, _ = generate_bearer_token(file_path) # Set credentials from the file path
+        bearer_token = token
+        # Print the generated Bearer Token to the console
+        print("Generated Bearer Token (from file):", bearer_token)
+except SkyflowError as error:
+    # Handle any exceptions encountered during the token generation process
+    print(f"Error generating token from file path: {error}")
+except Exception as e:
+    # Handle any other unexpected exceptions
+    print(f"Error generating token from file path: {e}")
+
+# Example 2: Generate Bearer Token using the credentials JSON string
 try:
-    skyflow_client.vault('').query(query_request)
-except SkyflowError as e:
-    if e.data:
-        print(e.data)
+    # Provide the credentials JSON content as a string
+    skyflow_credentials = {
+        'clientID': '',
+        'clientName': '',
+        'tokenURI': '',
+        'keyID': '',
+        'privateKey': '',
+    }
+
+    # Convert credentials dictionary to JSON string
+    credentials_string = json.dumps(skyflow_credentials)
+
+    # Check if the token is either not initialized or has expired
+    if not is_expired(bearer_token):
+        print("Generated Bearer Token (from string):", bearer_token)
     else:
-        print(e.message)
+        # Generate a new Bearer Token from the credentials string
+        token, _ = generate_bearer_token_from_creds(credentials_string)
+        bearer_token = token
+        print("Generated Bearer Token (from string):", bearer_token)
+except SkyflowError as error:
+    # Handle any exceptions encountered during the token generation process
+    print(f"Error generating token from credentials string: {error}")
+except Exception as e:
+    # Handle any other unexpected exceptions
+    print(f"Error generating token from credentials string: {e}")
 ```
 
-Sample Response
+#### Generate bearer tokens with context
+**Context-aware authorization** embeds context values into a bearer token during its generation, so you can reference those values in your policies.
This enables more flexible access controls, such as helping you track end-user identity when making API calls using service accounts, and facilitates using signed data tokens during detokenization. + +A service account with the context_id identifier generates bearer tokens containing context information, represented as a JWT claim in a Skyflow-generated bearer token. Tokens generated from such service accounts include a context_identifier claim, are valid for 60 minutes, and can be used to make API calls to the Data and Management APIs, depending on the service account's permissions. +#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/token_generation_with_context_example.py): ```python -QueryResponse( - fields=[ - { - 'card_number': 'XXXXXXXXXXXX1112', - 'name': 'S***ar', - 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968', - 'tokenized_data': {} - } - ], - errors=[] +import json +from skyflow.error import SkyflowError +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, ) -``` -## Logging +""" +Example program to generate a Bearer Token using Skyflow's BearerToken utility. +The token is generated using two approaches: +1. By providing the credentials.json file path. +2. By providing the contents of credentials.json as a string. +""" -The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below: +# Variable to store the generated token +bearer_token = '' -```python -from skyflow import Skyflow -from skyflow import LogLevel -from skyflow import Env +# Approach 1: Generate Bearer Token by specifying the path to the credentials.json file +try: + # Replace with the full path to your credentials.json file + file_path = 'YOUR_CREDENTIALS_FILE_PATH' -# To generate Bearer Token from credentials string. 
-skyflow_credentials = { + # Set context string (example: "abc") + options = {'ctx': 'abc'} + + # Check if the token is already generated and still valid + if not is_expired(bearer_token): + print("Generated Bearer Token (from file):", bearer_token) + else: + # Generate a new Bearer Token from the credentials file + token, _ = generate_bearer_token(file_path, options) # Set credentials from the file path and options + bearer_token = token + # Print the generated Bearer Token to the console + print("Generated Bearer Token (from file):", bearer_token) +except SkyflowError as error: + # Handle any exceptions encountered during the token generation process + print(f"Error generating token from file path: {error}") +except Exception as e: + # Handle any other unexpected exceptions + print(f"Error generating token from file path: {e}") + +# Approach 2: Generate Bearer Token by specifying the contents of credentials.json as a string +try: + # Provide the credentials JSON content as a string + skyflow_credentials = { 'clientID': '', 'clientName': '', 'tokenURI': '', 'keyID': '', 'privateKey': '', } -credentials_string = json.dumps(skyflow_credentials) -# Pass one of api_key, token, credentials_string & path as credentials -credentials = { - 'token': 'BEARER_TOKEN', # bearer token - # api_key: "API_KEY", # API_KEY - # path: "PATH", # path to credentials file - # credentials_string: credentials_string, # credentials as string -} + # Convert credentials dictionary to JSON string + credentials_string = json.dumps(skyflow_credentials) -client = ( - Skyflow.builder() - .add_vault_config({ - 'vault_id': 'VAULT_ID', # primary vault - 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com - 'env': Env.PROD, # Env by default it is set to PROD - 'credentials': credentials # individual credentials - }) - .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed - .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR - .build() + # Set context string (example: "abc") + options = {'ctx': 'abc'} + + # Check if the token is either not initialized or has expired + if not is_expired(bearer_token): + print("Generated Bearer Token (from string):", bearer_token) + else: + # Generate a new Bearer Token from the credentials string and options + token, _ = generate_bearer_token_from_creds(credentials_string, options) + bearer_token = token + print("Generated Bearer Token (from string):", bearer_token) +except SkyflowError as error: + # Handle any exceptions encountered during the token generation process + print(f"Error generating token from file path: {error}") +except Exception as e: + # Handle any other unexpected exceptions + print(f"Error generating token from credentials string: {e}") +``` + +#### Generate scoped bearer tokens +A service account with multiple roles can generate bearer tokens with access limited to a specific role by specifying the appropriate roleID. This can be used to limit access to specific roles for services with multiple responsibilities, such as segregating access for billing and analytics. The generated bearer tokens are valid for 60 minutes and can only execute operations permitted by the permissions associated with the designated role. 
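+
+The scoped-token example linked below loads the service account credentials from a file. If your credentials are only available as a JSON string, the same `role_ids` option can also be passed to `generate_bearer_token_from_creds`; the sketch below is illustrative only, and every credential value and the role ID are placeholders.
+
+```python
+import json
+from skyflow.error import SkyflowError
+from skyflow.service_account import generate_bearer_token_from_creds
+
+# Credentials supplied as a JSON string (placeholder values)
+skyflow_credentials = {
+    'clientID': '<CLIENT_ID>',
+    'clientName': '<CLIENT_NAME>',
+    'tokenURI': '<TOKEN_URI>',
+    'keyID': '<KEY_ID>',
+    'privateKey': '<PRIVATE_KEY>',
+}
+credentials_string = json.dumps(skyflow_credentials)
+
+# Limit the generated token to a single role (placeholder role ID)
+options = {'role_ids': ['<ROLE_ID>']}
+
+try:
+    scoped_token, _ = generate_bearer_token_from_creds(credentials_string, options)
+    print('Generated scoped Bearer Token (from string):', scoped_token)
+except SkyflowError as error:
+    print(f"Error generating scoped token from credentials string: {error}")
+```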
+
+
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/scoped_token_generation_example.py):
+```python
+import json
+from skyflow.error import SkyflowError
+from skyflow.service_account import (
+    generate_bearer_token,
+    generate_bearer_token_from_creds,
+    is_expired,
 )
+
+"""
+Example program to generate a Scoped Token using Skyflow's BearerToken utility.
+The token is generated by providing the file path to the credentials.json file
+and specifying roles associated with the token.
+"""
+
+# Variable to store the generated token
+scoped_token = ''
+
+# Example: Generate Scoped Token by specifying the credentials.json file path
+try:
+    # Specify the full file path to the service account's credentials.json file
+    file_path = 'YOUR_CREDENTIALS_FILE_PATH'
+
+    # Set the role IDs that the generated token should be scoped to
+    options = {'role_ids': ['ROLE_ID']}
+
+    # Check if the token is already generated and still valid
+    if not is_expired(scoped_token):
+        print("Generated Bearer Token (from file):", scoped_token)
+    else:
+        # Generate a new Bearer Token from the credentials file and associated roles
+        scoped_token, _ = generate_bearer_token(file_path, options) # Set credentials from the file path and options
+        # Print the generated Bearer Token to the console
+        print("Generated Bearer Token (from file):", scoped_token)
+except SkyflowError as error:
+    # Handle any exceptions encountered during the token generation process
+    print(f"Error generating token from file path: {error}")
+except Exception as e:
+    # Handle any other unexpected exceptions
+    print(f"Error generating token from file path: {e}")
+```
+
+#### Generate signed data tokens
+Skyflow generates data tokens when sensitive data is inserted into the vault. These data tokens can be digitally signed with a service account's private key, adding an extra layer of protection. Signed tokens can only be detokenized by providing the signed data token along with a bearer token generated from the service account's credentials. The service account must have the necessary permissions and context to successfully detokenize the signed data tokens.
+
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/signed_token_generation_example.py):
+```python
+import json
+from skyflow.error import SkyflowError
+from skyflow.service_account import (
+    generate_signed_data_tokens,
+    generate_signed_data_tokens_from_creds,
+)
+
+# Example program to generate Signed Data Tokens using Skyflow's utilities.
+# Signed Data Tokens can be generated in two ways:
+# 1. By specifying the file path to the credentials.json file.
+# 2. By providing the credentials as a JSON string.
-
-- `INFO`:
-
+# Example 1: Generate Signed Data Tokens using a credentials file
+try:
+    # File path to the service account's credentials.json file
+    file_path = "CREDENTIALS_FILE_PATH"
+
+    # Options for generating signed data tokens
+    options = {
+        "ctx": "CONTEXT_ID", # Set the context value
+        "data_tokens": ["DATA_TOKEN1", "DATA_TOKEN2"], # Set the data tokens to be signed
+        "time_to_live": 30, # Set the token's time-to-live (TTL) in seconds
+    }
+
+    # Generate and retrieve the signed data tokens
+    data_token, signed_data_token = generate_signed_data_tokens(file_path, options)
+    # Print the signed data tokens to the console
+    print("Signed Data Tokens (from file):", data_token, signed_data_token)
+except SkyflowError as error:
+    # Handle any exceptions encountered during the token generation process
+    print(f"Error generating signed token from file path: {error}")
+except Exception as e:
+    # Handle any other unexpected exceptions
+    print(f"Error generating signed token from file path: {e}")
+
+# Example 2: Generate Signed Data Tokens using credentials as a JSON string
+try:
+    # JSON object containing Skyflow credentials
+    skyflow_credentials = {
+        "clientID": "",
+        "clientName": "",
+        "tokenURI": "",
+        "keyID": "",
+        "privateKey": "",
+    }
 
-   When `LogLevel.INFO` is passed, INFO logs for every event that has occurred during the SDK flow execution will be printed along with WARN and ERROR logs
+    # Convert credentials dictionary to JSON string
+    credentials_string = json.dumps(skyflow_credentials)
 
-- `WARN`:
+    options = {
+        "ctx": "CONTEXT_ID", # Context value associated with the token
+        "data_tokens": ["DATA_TOKEN1", "DATA_TOKEN2"], # Set the data tokens to be signed
+        "time_to_live": 30, # Set the token's time-to-live (TTL) in seconds
+    }
 
-   When `LogLevel.WARN` is passed, WARN and ERROR logs will be printed
+    # Generate and retrieve the signed data tokens
+    data_token, signed_data_token = generate_signed_data_tokens_from_creds(credentials_string, options)
+    # Print the signed data tokens to the console
+    print("Signed Data Tokens (from string):", data_token, signed_data_token)
-- `ERROR`:
+except SkyflowError as error:
+    # Handle any exceptions encountered during the token generation process
+    print(f"Error generating signed token from credentials string: {error}")
+except Exception as e:
+    # Handle any other unexpected exceptions
+    print(f"Error generating signed token from credentials string: {e}")
 
-   When `LogLevel.ERROR` is passed, only ERROR logs will be printed.
+
+```
 
+Notes:
+- The `time_to_live` (TTL) value should be specified in seconds.
+- By default, the TTL value is set to 60 seconds.
 
 ## Logging
 
-The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below:
+The SDK provides logging using Python's built-in `logging` library. By default, the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below:
 
+Currently, the following five log levels are supported:
 - `DEBUG`:
+When `LogLevel.DEBUG` is passed, logs at all levels will be printed (DEBUG, INFO, WARN, ERROR).
+- `INFO`:
+When `LogLevel.INFO` is passed, INFO logs for every event that occurs during SDK flow execution will be printed, along with WARN and ERROR logs.
+- `WARN`:
+When `LogLevel.WARN` is passed, only WARN and ERROR logs will be printed.
+- `ERROR`:
+When `LogLevel.ERROR` is passed, only ERROR logs will be printed.
 - `OFF`:
+`LogLevel.OFF` can be used to turn off all logging from the Skyflow Python SDK.
+
+**Note:** The ranking of logging levels is as follows: `DEBUG` < `INFO` < `WARN` < `ERROR` < `OFF`.
+
+```python
+import json
+from skyflow import Skyflow
+from skyflow import LogLevel
+from skyflow import Env
+
+"""
+This example demonstrates how to configure the Skyflow client with custom log levels and authentication credentials (either token, credentials string, or other methods). It also shows how to configure a vault connection using specific parameters.
+1. Set up credentials with a Bearer token or credentials string.
+2. Define the Vault configuration.
+3. Build the Skyflow client with the chosen configuration and set log level.
+4. Example of changing the log level from ERROR (default) to INFO.
+"""
+
+# Step 1: Set up credentials - either pass token or use credentials string
+# In this case, we are using a Bearer token for authentication
+credentials = {
+    'token': '', # Replace with actual Bearer token
+}
+
+# Step 2: Define the Vault configuration
+# Configure the vault with necessary details like vault ID, cluster ID, and environment
+vault_config = {
+    'vault_id': '', # Replace with actual Vault ID (primary vault)
+    'cluster_id': '', # Replace with actual Cluster ID (from vault URL)
+    'env': Env.PROD, # Set the environment (default is PROD)
+    'credentials': credentials # Set credentials for the vault (either token or credentials)
+}
+
+# Step 3: Define additional Skyflow credentials (optional, if needed for credentials string)
+skyflow_credentials = {
+    'clientID': '', # Replace with your client ID
+    'clientName': '', # Replace with your client name
+    'tokenURI': '', # Replace with your token URI
+    'keyID': '', # Replace with your key ID
+    'privateKey': '', # Replace with your private key
+}
 
-    `LogLevel.OFF` can be used to turn off all logging from the Skyflow SDK.
+# Convert the credentials object to a JSON string to be used as the credentials string
+credentials_string = json.dumps(skyflow_credentials) # Set credentials string
 
-`Note`: The ranking of logging levels is as follows : `DEBUG` < `INFO` < `WARN` < `ERROR` < `OFF`
+# Step 4: Build the Skyflow client with the chosen configuration and log level
+skyflow_client = (
+    Skyflow.builder()
+    .add_vault_config(vault_config) # Add the Vault configuration
+    .add_skyflow_credentials({'credentials_string': credentials_string}) # Skyflow credentials are used if no individual vault credentials are passed
+    .set_log_level(LogLevel.INFO) # Set log level to INFO (default is ERROR)
+    .build() # Build the Skyflow client
+)
+
+# Now, the Skyflow client is ready to use with the specified log level and credentials
+print('Skyflow client has been successfully configured with log level: INFO.')
+```
 
 ## Reporting a Vulnerability
 
-If you discover a potential security issue in this project, please reach out to us at security@skyflow.com. Please do not create public GitHub issues or Pull Requests, as malicious actors could potentially view them.
+If you discover a potential security issue in this project, please reach out to us at **security@skyflow.com**. Please do not create public GitHub issues or Pull Requests, as malicious actors could potentially view them.
From b096bcf52bec20f20b610395aef0b755a4cdf1bc Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 4 Mar 2025 18:21:07 +0530 Subject: [PATCH 12/60] SK-1906 Improve debugging in connections --- skyflow/utils/_utils.py | 5 +++++ skyflow/vault/controller/_connections.py | 3 ++- tests/vault/controller/test__connection.py | 20 ++++++++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 5002956a..d3c21071 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -341,6 +341,11 @@ def parse_invoke_connection_response(api_response: requests.Response): if 'x-request-id' in api_response.headers: message += ' - request id: ' + api_response.headers['x-request-id'] + + if 'error-from-client' in api_response.headers: + error_from_client = api_response.headers['error-from-client'] + details = [{ "error_from_client": error_from_client }] + raise SkyflowError(message, status_code, details=details) raise SkyflowError(message, status_code) diff --git a/skyflow/vault/controller/_connections.py b/skyflow/vault/controller/_connections.py index 2fc52f11..9f067d92 100644 --- a/skyflow/vault/controller/_connections.py +++ b/skyflow/vault/controller/_connections.py @@ -27,7 +27,7 @@ def invoke(self, request: InvokeConnectionRequest): invoke_connection_request.headers['sky-metadata'] = json.dumps(get_metrics()) - log_info(SkyflowMessages.Info.INVOKE_CONNECTION_TRIGGERED, self.__vault_client.get_logger()) + log_info(SkyflowMessages.Info.INVOKE_CONNECTION_TRIGGERED.value, self.__vault_client.get_logger()) try: response = session.send(invoke_connection_request) @@ -36,5 +36,6 @@ def invoke(self, request: InvokeConnectionRequest): return invoke_connection_response except Exception as e: + if isinstance(e, SkyflowError): raise e raise SkyflowError(SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value, SkyflowMessages.ErrorCodes.SERVER_ERROR.value) \ No newline at end of file diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index 0bd3d293..5b5a106d 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -100,5 +100,25 @@ def test_invoke_request_error(self, mock_send): with self.assertRaises(SkyflowError) as context: self.connection.invoke(request) self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) + self.assertTrue(context.exception.details['error_from_client']) + @patch('requests.Session.send') + def test_invoke_request_error_from_client(self, mock_send): + mock_response = Mock() + mock_response.status_code = FAILURE_STATUS_CODE + mock_response.content = ERROR_RESPONSE_CONTENT + mock_response.headers = {'error-from-client': True} + mock_send.return_value = mock_response + + request = InvokeConnectionRequest( + method=RequestMethod.POST, + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) + self.assertTrue(context.exception.details['error_from_client']) From 74628e555634735b4f8c3645c309036a8c287b2b Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Tue, 4 Mar 2025 22:25:09 +0530 Subject: [PATCH 13/60] SK-1906: Fix invoke connection test case --- tests/vault/controller/test__connection.py | 36 ++++++++++------------ 1 file changed, 16 
insertions(+), 20 deletions(-) diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index 5b5a106d..9723b91a 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -1,8 +1,8 @@ import unittest from unittest.mock import Mock, patch - +import requests from skyflow.error import SkyflowError -from skyflow.utils import SkyflowMessages +from skyflow.utils import SkyflowMessages, parse_invoke_connection_response from skyflow.utils.enums import RequestMethod from skyflow.vault.connection import InvokeConnectionRequest from skyflow.vault.controller import Connection @@ -21,7 +21,7 @@ INVALID_HEADERS = "invalid_headers" INVALID_BODY = "invalid_body" FAILURE_STATUS_CODE = 400 -ERROR_RESPONSE_CONTENT = '{"error": {"message": "error occurred"}}' +ERROR_RESPONSE_CONTENT = b'{"error": {"message": "Invalid Request"}}' class TestConnection(unittest.TestCase): def setUp(self): @@ -100,25 +100,21 @@ def test_invoke_request_error(self, mock_send): with self.assertRaises(SkyflowError) as context: self.connection.invoke(request) self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) - self.assertTrue(context.exception.details['error_from_client']) - @patch('requests.Session.send') - def test_invoke_request_error_from_client(self, mock_send): - mock_response = Mock() + def test_parse_invoke_connection_response_error_from_client(self): + mock_response = Mock(spec=requests.Response) mock_response.status_code = FAILURE_STATUS_CODE mock_response.content = ERROR_RESPONSE_CONTENT - mock_response.headers = {'error-from-client': True} - mock_send.return_value = mock_response - - request = InvokeConnectionRequest( - method=RequestMethod.POST, - body=VALID_BODY, - path_params=VALID_PATH_PARAMS, - headers=VALID_HEADERS, - query_params=VALID_QUERY_PARAMS - ) + mock_response.headers = { + 'error-from-client': 'true', + 'x-request-id': '12345' + } + mock_response.raise_for_status.side_effect = requests.HTTPError() with self.assertRaises(SkyflowError) as context: - self.connection.invoke(request) - self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) - self.assertTrue(context.exception.details['error_from_client']) + parse_invoke_connection_response(mock_response) + + exception = context.exception + + self.assertTrue(any(detail.get('error_from_client') == 'true' for detail in exception.details)) + From 9cd613b979ac0788910521a690fa427a8ae241c6 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Thu, 6 Mar 2025 21:05:55 +0530 Subject: [PATCH 14/60] SK-1934 Fix inconsistent error handling for invoke connections --- skyflow/utils/_skyflow_messages.py | 1 + skyflow/utils/_utils.py | 39 +++++++++++----------- skyflow/vault/controller/_connections.py | 3 +- tests/utils/test__utils.py | 5 +-- tests/vault/controller/test__connection.py | 11 ++++-- 5 files changed, 34 insertions(+), 25 deletions(-) diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index 954c5e14..26ca4a25 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -275,6 +275,7 @@ class ErrorLogs(Enum): UPDATE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Update request resulted in failure." QUERY_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Query request resulted in failure." GET_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Get request resulted in failure." 
+ INVOKE_CONNECTION_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Invoke connection request resulted in failure." class Interface(Enum): INSERT = "INSERT" diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index d3c21071..2261d3e6 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -327,29 +327,30 @@ def parse_invoke_connection_response(api_response: requests.Response): invoke_connection_response.response = json_content return invoke_connection_response - except: + except Exception as e: raise SkyflowError(SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content), status_code) except HTTPError: - message = SkyflowMessages.Error.API_ERROR.value.format(status_code) - if api_response and api_response.content: - try: - error_response = json.loads(content) - if isinstance(error_response.get('error'), dict) and 'message' in error_response['error']: - message = error_response['error']['message'] - except json.JSONDecodeError: - message = SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content) - - if 'x-request-id' in api_response.headers: - message += ' - request id: ' + api_response.headers['x-request-id'] - - if 'error-from-client' in api_response.headers: - error_from_client = api_response.headers['error-from-client'] - details = [{ "error_from_client": error_from_client }] - raise SkyflowError(message, status_code, details=details) - + message = SkyflowMessages.Error.API_ERROR.value.format(status_code) + try: + error_response = json.loads(content) + request_id = api_response.headers['x-request-id'] + error_from_client = api_response.headers.get('error-from-client') + + status_code = error_response.get('error', {}).get('http_code', 500) # Default to 500 if not found + http_status = error_response.get('error', {}).get('http_status') + grpc_code = error_response.get('error', {}).get('grpc_code') + details = error_response.get('error', {}).get('details') + message = error_response.get('error', {}).get('message', "An unknown error occurred.") + + if error_from_client is not None: + if details is None: details = [] + details.append({'error_from_client': error_from_client}) + + raise SkyflowError(message, status_code, request_id, grpc_code, http_status, details) + except json.JSONDecodeError: + message = SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content) raise SkyflowError(message, status_code) - def log_and_reject_error(description, status_code, request_id, http_status=None, grpc_code=None, details=None, logger = None): raise SkyflowError(description, status_code, request_id, grpc_code, http_status, details) diff --git a/skyflow/vault/controller/_connections.py b/skyflow/vault/controller/_connections.py index 9f067d92..81c6ea10 100644 --- a/skyflow/vault/controller/_connections.py +++ b/skyflow/vault/controller/_connections.py @@ -3,7 +3,7 @@ from skyflow.error import SkyflowError from skyflow.utils import construct_invoke_connection_request, SkyflowMessages, get_metrics, \ parse_invoke_connection_response -from skyflow.utils.logger import log_info +from skyflow.utils.logger import log_info, log_error_log from skyflow.vault.connection import InvokeConnectionRequest @@ -36,6 +36,7 @@ def invoke(self, request: InvokeConnectionRequest): return invoke_connection_response except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.INVOKE_CONNECTION_REQUEST_REJECTED.value, self.__vault_client.get_logger()) if isinstance(e, SkyflowError): raise e raise SkyflowError(SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value, 
SkyflowMessages.ErrorCodes.SERVER_ERROR.value) \ No newline at end of file diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index c9010c98..e70afc0e 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -344,7 +344,8 @@ def test_parse_invoke_connection_response_http_error_with_json_error_message(sel with self.assertRaises(SkyflowError) as context: parse_invoke_connection_response(mock_response) - self.assertEqual(context.exception.message, "Not Found - request id: 1234") + self.assertEqual(context.exception.message, "Not Found") + self.assertEqual(context.exception.request_id, "1234") @patch("requests.Response") def test_parse_invoke_connection_response_http_error_without_json_error_message(self, mock_response): @@ -357,7 +358,7 @@ def test_parse_invoke_connection_response_http_error_without_json_error_message( with self.assertRaises(SkyflowError) as context: parse_invoke_connection_response(mock_response) - self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Internal Server Error") + " - request id: 1234") + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Internal Server Error")) @patch("skyflow.utils._utils.log_and_reject_error") def test_handle_exception_json_error(self, mock_log_and_reject_error): diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index 9723b91a..61be3163 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -4,6 +4,7 @@ from skyflow.error import SkyflowError from skyflow.utils import SkyflowMessages, parse_invoke_connection_response from skyflow.utils.enums import RequestMethod +from skyflow.utils._version import SDK_VERSION from skyflow.vault.connection import InvokeConnectionRequest from skyflow.vault.controller import Connection @@ -21,7 +22,8 @@ INVALID_HEADERS = "invalid_headers" INVALID_BODY = "invalid_body" FAILURE_STATUS_CODE = 400 -ERROR_RESPONSE_CONTENT = b'{"error": {"message": "Invalid Request"}}' +ERROR_RESPONSE_CONTENT = '"message": "Invalid Request"' +ERROR_FROM_CLIENT_RESPONSE_CONTENT = b'{"error": {"message": "Invalid Request"}}' class TestConnection(unittest.TestCase): def setUp(self): @@ -99,12 +101,14 @@ def test_invoke_request_error(self, mock_send): with self.assertRaises(SkyflowError) as context: self.connection.invoke(request) - self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) + self.assertEqual(context.exception.message, f'Skyflow Python SDK {SDK_VERSION} Response {ERROR_RESPONSE_CONTENT} is not valid JSON.') + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(ERROR_RESPONSE_CONTENT)) + self.assertEqual(context.exception.http_code, 400) def test_parse_invoke_connection_response_error_from_client(self): mock_response = Mock(spec=requests.Response) mock_response.status_code = FAILURE_STATUS_CODE - mock_response.content = ERROR_RESPONSE_CONTENT + mock_response.content = ERROR_FROM_CLIENT_RESPONSE_CONTENT mock_response.headers = { 'error-from-client': 'true', 'x-request-id': '12345' @@ -117,4 +121,5 @@ def test_parse_invoke_connection_response_error_from_client(self): exception = context.exception self.assertTrue(any(detail.get('error_from_client') == 'true' for detail in exception.details)) + self.assertEqual(exception.request_id, '12345') From 6ee188371cd806f59b3da9403a8027afde048bb9 Mon Sep 17 00:00:00 2001 From: 
saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Mon, 10 Mar 2025 16:04:25 +0530 Subject: [PATCH 15/60] SK-1874: Support for the combination of tokens and redaction type in detokenize API (#164) * SK-1874: Support for the combination of tokens and redaction type in the Detokenize API. (#156) --- samples/vault_api/detokenize_records.py | 15 +++++++++++---- setup.py | 2 +- skyflow/utils/_skyflow_messages.py | 6 ++++-- skyflow/utils/_version.py | 2 +- skyflow/utils/validations/_validations.py | 21 +++++++++++++++------ skyflow/vault/controller/_vault.py | 4 ++-- skyflow/vault/tokens/_detokenize_request.py | 5 ++--- tests/vault/controller/test__vault.py | 12 ++++++++++-- 8 files changed, 46 insertions(+), 21 deletions(-) diff --git a/samples/vault_api/detokenize_records.py b/samples/vault_api/detokenize_records.py index b76aa89e..e93d5a18 100644 --- a/samples/vault_api/detokenize_records.py +++ b/samples/vault_api/detokenize_records.py @@ -52,13 +52,20 @@ def perform_detokenization(): ) # Step 4: Prepare Detokenization Data - detokenize_data = ['token1', 'token2', 'token3'] # Tokens to be detokenized - redaction_type = RedactionType.REDACTED + detokenize_data = [ + { + 'token': '', # Token to be detokenized + 'redaction': RedactionType.REDACTED + }, + { + 'token': '', # Token to be detokenized + 'redaction': RedactionType.MASKED + } + ] # Create Detokenize Request detokenize_request = DetokenizeRequest( - tokens=detokenize_data, - redaction_type=redaction_type, + data=detokenize_data, continue_on_error=True # Continue processing on errors ) diff --git a/setup.py b/setup.py index 55c45d4f..c7b17c3d 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b1' +current_version = '2.0.0b1.dev0+dcb5ddc' setup( name='skyflow', diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index 954c5e14..71fc76f9 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -101,7 +101,7 @@ class Error(Enum): INVOKE_CONNECTION_FAILED = f"{error_prefix} Invoke Connection operation failed." INVALID_IDS_TYPE = f"{error_prefix} Validation error. 'ids' has a value of type {{}}. Specify 'ids' as list." - INVALID_REDACTION_TYPE = f"{error_prefix} Validation error. 'redaction' has a value of type {{}}. Specify 'redaction' as type Skyflow.Redaction." + INVALID_REDACTION_TYPE = f"{error_prefix} Validation error. 'redaction' has a value of type {{}}. Specify 'redaction' as type Skyflow.RedactionType." INVALID_COLUMN_NAME = f"{error_prefix} Validation error. 'column' has a value of type {{}}. Specify 'column' as a string." INVALID_COLUMN_VALUE = f"{error_prefix} Validation error. columnValues key has a value of type {{}}. Specify columnValues key as list." INVALID_FIELDS_VALUE = f"{error_prefix} Validation error. fields key has a value of type{{}}. Specify fields key as list." @@ -117,8 +117,10 @@ class Error(Enum): UPDATE_FIELD_KEY_ERROR = f"{error_prefix} Validation error. Fields are empty in an update payload. Specify at least one field." INVALID_FIELDS_TYPE = f"{error_prefix} Validation error. The 'data' key has a value of type {{}}. Specify 'data' as a dictionary." IDS_KEY_ERROR = f"{error_prefix} Validation error. 'ids' key is missing from the payload. Specify an 'ids' key." - INVALID_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. The 'tokens' key has a value of type {{}}. Specify 'tokens' as a list." 
+ INVALID_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. The 'data' field is invalid. Specify 'data' as a list of dictionaries containing 'token' and 'redaction'." + INVALID_DATA_FOR_DETOKENIZE = f"{error_prefix}" EMPTY_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. Tokens are empty in detokenize payload. Specify at lease one token" + INVALID_TOKEN_TYPE = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens should be of type string." INVALID_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. The 'values' key has a value of type {{}}. Specify 'tokenize_parameters' as a list." EMPTY_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. Tokenize values are empty in tokenize payload. Specify at least one parameter." diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 64d4c6b5..a17f3809 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b1' \ No newline at end of file +SDK_VERSION = '2.0.0b1.dev0+dcb5ddc' \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index c3026e75..5b7827a9 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -502,19 +502,28 @@ def validate_update_request(logger, request): invalid_input_error_code) def validate_detokenize_request(logger, request): - if not isinstance(request.redaction_type, RedactionType): - raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(request.redaction_type)), invalid_input_error_code) - if not isinstance(request.continue_on_error, bool): raise SkyflowError(SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value, invalid_input_error_code) - if not len(request.tokens): + if not isinstance(request.data, list): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value(type(request.data)), invalid_input_error_code) + + if not len(request.data): log_error_log(SkyflowMessages.ErrorLogs.TOKENS_REQUIRED.value.format("DETOKENIZE"), logger = logger) log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("DETOKENIZE"), logger = logger) raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value, invalid_input_error_code) - if not isinstance(request.tokens, list): - raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value(type(request.tokens)), invalid_input_error_code) + for item in request.data: + if 'token' not in item or 'redaction' not in item: + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value(type(request.data)), invalid_input_error_code) + token = item.get('token') + redaction = item.get('redaction') + + if not isinstance(token, str) or not token: + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_TYPE.value.format("DETOKENIZE"), invalid_input_error_code) + + if not isinstance(redaction, RedactionType) or not redaction: + raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(redaction)), invalid_input_error_code) def validate_tokenize_request(logger, request): parameters = request.values diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index ee6a4ae5..9867443f 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -230,8 +230,8 @@ def detokenize(self, request: DetokenizeRequest): log_info(SkyflowMessages.Info.DETOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() tokens_list 
= [ - V1DetokenizeRecordRequest(token=token, redaction=request.redaction_type.value) - for token in request.tokens + V1DetokenizeRecordRequest(token=item.get('token'), redaction=item.get('redaction').value) + for item in request.data ] payload = V1DetokenizePayload(detokenization_parameters=tokens_list, continue_on_error=request.continue_on_error) tokens_api = self.__vault_client.get_tokens_api() diff --git a/skyflow/vault/tokens/_detokenize_request.py b/skyflow/vault/tokens/_detokenize_request.py index 5e3bc041..73a5368e 100644 --- a/skyflow/vault/tokens/_detokenize_request.py +++ b/skyflow/vault/tokens/_detokenize_request.py @@ -1,7 +1,6 @@ from skyflow.utils.enums.redaction_type import RedactionType class DetokenizeRequest: - def __init__(self, tokens, redaction_type = RedactionType.PLAIN_TEXT, continue_on_error = False): - self.tokens = tokens - self.redaction_type = redaction_type + def __init__(self, data, continue_on_error = False): + self.data = data self.continue_on_error = continue_on_error \ No newline at end of file diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 0d2ea3d8..6e0805e0 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -455,8 +455,16 @@ def test_query_successful(self, mock_parse_response, mock_validate): @patch("skyflow.vault.controller._vault.parse_detokenize_response") def test_detokenize_successful(self, mock_parse_response, mock_validate): request = DetokenizeRequest( - tokens=["token1", "token2"], - redaction_type=RedactionType.PLAIN_TEXT, + data=[ + { + 'token': 'token1', + 'redaction': RedactionType.PLAIN_TEXT + }, + { + 'token': 'token2', + 'redaction': RedactionType.PLAIN_TEXT + } + ], continue_on_error=False ) From 5a63cc9d9fa311fb683e06217eb96385a6cd5211 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Mon, 10 Mar 2025 11:53:52 +0000 Subject: [PATCH 16/60] [AUTOMATED] Private Release 2.0.0b1.dev0+3d4ee51 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index c7b17c3d..9d68a420 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b1.dev0+dcb5ddc' +current_version = '2.0.0b1.dev0+3d4ee51' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index a17f3809..522115a4 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b1.dev0+dcb5ddc' \ No newline at end of file +SDK_VERSION = '2.0.0b1.dev0+3d4ee51' \ No newline at end of file From 4e15606aaa8dbfa908a6694ce74032a22c91e8d6 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Thu, 13 Mar 2025 15:15:01 +0530 Subject: [PATCH 17/60] SK-1772: Update enums (#167) --- skyflow/utils/_utils.py | 2 +- skyflow/utils/enums/env.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 5002956a..e0d8783d 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -61,7 +61,7 @@ def get_vault_url(cluster_id, env,vault_id, logger = None): raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) base_url = EnvUrls[env.name].value - protocol = "https" if env != Env.PROD else "http" + protocol = "https" return f"{protocol}://{cluster_id}.{base_url}" diff --git 
a/skyflow/utils/enums/env.py b/skyflow/utils/enums/env.py index 862f8f8a..1f2f7f17 100644 --- a/skyflow/utils/enums/env.py +++ b/skyflow/utils/enums/env.py @@ -1,13 +1,13 @@ from enum import Enum class Env(Enum): - DEV = 'DEV', - SANDBOX = 'SANDBOX', + DEV = 'DEV' + SANDBOX = 'SANDBOX' PROD = 'PROD' STAGE = 'STAGE' class EnvUrls(Enum): - PROD = "vault.skyflowapis.com", - SANDBOX = "vault.skyflowapis-preview.com", + PROD = "vault.skyflowapis.com" + SANDBOX = "vault.skyflowapis-preview.com" DEV = "vault.skyflowapis.dev" STAGE = "vault.skyflowapis.tech" \ No newline at end of file From f760bc0979da6e5e8736a01731a3ccd262b2e9fa Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Mon, 17 Mar 2025 18:51:00 +0530 Subject: [PATCH 18/60] SK-1908: Migrate Python SDK from openapi generator to Fern SDK generator (#168) * SK-1908: Migrate from openapi generator to Fern --- ci-scripts/bump_version.sh | 2 + requirements.txt | 3 +- setup.py | 27 +- skyflow/client/skyflow.py | 2 +- skyflow/generated/rest/__init__.py | 217 +- skyflow/generated/rest/api/__init__.py | 9 - skyflow/generated/rest/api/audit_api.py | 848 ----- .../generated/rest/api/authentication_api.py | 319 -- skyflow/generated/rest/api/bin_lookup_api.py | 315 -- skyflow/generated/rest/api/query_api.py | 330 -- skyflow/generated/rest/api/records_api.py | 3310 ----------------- skyflow/generated/rest/api/tokens_api.py | 623 ---- skyflow/generated/rest/api_client.py | 789 ---- skyflow/generated/rest/api_response.py | 21 - skyflow/generated/rest/audit/__init__.py | 19 + skyflow/generated/rest/audit/client.py | 509 +++ .../generated/rest/audit/types/__init__.py | 27 + ...t_events_request_filter_ops_action_type.py | 27 + ..._request_filter_ops_context_access_type.py | 7 + ...s_request_filter_ops_context_actor_type.py | 7 + ...ts_request_filter_ops_context_auth_mode.py | 7 + ...events_request_filter_ops_resource_type.py | 39 + ..._audit_events_request_sort_ops_order_by.py | 5 + .../generated/rest/authentication/__init__.py | 2 + .../generated/rest/authentication/client.py | 264 ++ skyflow/generated/rest/bin_lookup/__init__.py | 2 + skyflow/generated/rest/bin_lookup/client.py | 204 + skyflow/generated/rest/client.py | 160 + skyflow/generated/rest/configuration.py | 464 --- skyflow/generated/rest/core/__init__.py | 47 + skyflow/generated/rest/core/api_error.py | 15 + skyflow/generated/rest/core/client_wrapper.py | 76 + skyflow/generated/rest/core/datetime_utils.py | 28 + skyflow/generated/rest/core/file.py | 67 + skyflow/generated/rest/core/http_client.py | 499 +++ .../generated/rest/core/jsonable_encoder.py | 101 + .../generated/rest/core/pydantic_utilities.py | 296 ++ skyflow/generated/rest/core/query_encoder.py | 58 + .../rest/core/remove_none_from_dict.py | 11 + .../generated/rest/core/request_options.py | 35 + skyflow/generated/rest/core/serialization.py | 272 ++ skyflow/generated/rest/environment.py | 8 + skyflow/generated/rest/errors/__init__.py | 7 + .../rest/errors/bad_request_error.py | 9 + .../generated/rest/errors/not_found_error.py | 9 + .../rest/errors/unauthorized_error.py | 9 + skyflow/generated/rest/exceptions.py | 200 - skyflow/generated/rest/models/__init__.py | 70 - .../models/audit_event_audit_resource_type.py | 66 - .../rest/models/audit_event_context.py | 113 - .../generated/rest/models/audit_event_data.py | 88 - .../rest/models/audit_event_http_info.py | 90 - .../rest/models/batch_record_method.py | 41 - .../rest/models/context_access_type.py | 39 - 
.../rest/models/context_auth_mode.py | 40 - .../detokenize_record_response_value_type.py | 45 - .../generated/rest/models/googlerpc_status.py | 100 - skyflow/generated/rest/models/protobuf_any.py | 101 - .../query_service_execute_query_body.py | 88 - .../record_service_batch_operation_body.py | 101 - .../record_service_bulk_delete_record_body.py | 88 - .../record_service_insert_record_body.py | 105 - .../record_service_update_record_body.py | 97 - .../rest/models/redaction_enum_redaction.py | 40 - .../rest/models/request_action_type.py | 54 - .../rest/models/v1_audit_after_options.py | 90 - .../rest/models/v1_audit_event_response.py | 98 - .../rest/models/v1_audit_response.py | 102 - .../rest/models/v1_audit_response_event.py | 110 - .../models/v1_audit_response_event_request.py | 114 - .../models/v1_batch_operation_response.py | 90 - .../generated/rest/models/v1_batch_record.py | 108 - .../rest/models/v1_bin_list_request.py | 98 - .../rest/models/v1_bin_list_response.py | 96 - .../models/v1_bulk_delete_record_response.py | 88 - .../models/v1_bulk_get_record_response.py | 96 - skyflow/generated/rest/models/v1_byot.py | 39 - skyflow/generated/rest/models/v1_card.py | 104 - .../rest/models/v1_delete_file_response.py | 90 - .../rest/models/v1_delete_record_response.py | 90 - .../rest/models/v1_detokenize_payload.py | 100 - .../models/v1_detokenize_record_request.py | 91 - .../models/v1_detokenize_record_response.py | 95 - .../rest/models/v1_detokenize_response.py | 96 - .../generated/rest/models/v1_field_records.py | 90 - .../rest/models/v1_file_av_scan_status.py | 45 - .../rest/models/v1_get_auth_token_request.py | 98 - .../rest/models/v1_get_auth_token_response.py | 90 - .../v1_get_file_scan_status_response.py | 89 - .../rest/models/v1_get_query_response.py | 96 - .../rest/models/v1_insert_record_response.py | 96 - .../generated/rest/models/v1_member_type.py | 39 - .../rest/models/v1_record_meta_properties.py | 90 - .../rest/models/v1_tokenize_payload.py | 96 - .../rest/models/v1_tokenize_record_request.py | 90 - .../models/v1_tokenize_record_response.py | 88 - .../rest/models/v1_tokenize_response.py | 96 - .../rest/models/v1_update_record_response.py | 90 - .../rest/models/v1_vault_field_mapping.py | 92 - .../rest/models/v1_vault_schema_config.py | 96 - skyflow/generated/rest/query/__init__.py | 2 + skyflow/generated/rest/query/client.py | 181 + skyflow/generated/rest/records/__init__.py | 13 + skyflow/generated/rest/records/client.py | 1978 ++++++++++ .../generated/rest/records/types/__init__.py | 11 + ...ervice_bulk_get_record_request_order_by.py | 5 + ...rvice_bulk_get_record_request_redaction.py | 7 + ...rd_service_get_record_request_redaction.py | 7 + skyflow/generated/rest/rest.py | 258 -- skyflow/generated/rest/tokens/__init__.py | 2 + skyflow/generated/rest/tokens/client.py | 395 ++ skyflow/generated/rest/types/__init__.py | 91 + .../types/audit_event_audit_resource_type.py | 39 + .../rest/types/audit_event_context.py | 90 + .../generated/rest/types/audit_event_data.py | 26 + .../rest/types/audit_event_http_info.py | 29 + .../rest/types/batch_record_method.py | 5 + .../rest/types/context_access_type.py | 5 + .../generated/rest/types/context_auth_mode.py | 5 + .../detokenize_record_response_value_type.py | 7 + .../generated/rest/types/googlerpc_status.py | 22 + skyflow/generated/rest/types/protobuf_any.py | 21 + .../rest/types/redaction_enum_redaction.py | 5 + .../rest/types/request_action_type.py | 27 + .../rest/types/v_1_audit_after_options.py | 31 + 
.../rest/types/v_1_audit_event_response.py | 38 + .../rest/types/v_1_audit_response.py | 28 + .../rest/types/v_1_audit_response_event.py | 50 + .../types/v_1_audit_response_event_request.py | 67 + .../types/v_1_batch_operation_response.py | 33 + .../generated/rest/types/v_1_batch_record.py | 69 + .../rest/types/v_1_bin_list_response.py | 27 + .../types/v_1_bulk_delete_record_response.py | 26 + .../types/v_1_bulk_get_record_response.py | 23 + skyflow/generated/rest/types/v_1_byot.py | 5 + skyflow/generated/rest/types/v_1_card.py | 68 + .../rest/types/v_1_delete_file_response.py | 27 + .../rest/types/v_1_delete_record_response.py | 27 + .../types/v_1_detokenize_record_request.py | 25 + .../types/v_1_detokenize_record_response.py | 38 + .../rest/types/v_1_detokenize_response.py | 23 + .../generated/rest/types/v_1_field_records.py | 31 + .../rest/types/v_1_file_av_scan_status.py | 18 + .../rest/types/v_1_get_auth_token_response.py | 33 + .../v_1_get_file_scan_status_response.py | 20 + .../rest/types/v_1_get_query_response.py | 23 + .../rest/types/v_1_insert_record_response.py | 23 + .../generated/rest/types/v_1_member_type.py | 5 + .../rest/types/v_1_record_meta_properties.py | 27 + .../rest/types/v_1_tokenize_record_request.py | 31 + .../types/v_1_tokenize_record_response.py | 22 + .../rest/types/v_1_tokenize_response.py | 23 + .../rest/types/v_1_update_record_response.py | 27 + .../rest/types/v_1_vault_field_mapping.py | 36 + .../rest/types/v_1_vault_schema_config.py | 34 + skyflow/generated/rest/version.py | 1 + skyflow/service_account/_utils.py | 4 +- skyflow/service_account/client/auth_client.py | 13 +- skyflow/utils/_utils.py | 5 +- skyflow/utils/constants.py | 2 + skyflow/utils/enums/env.py | 8 +- skyflow/utils/enums/redaction_type.py | 9 +- skyflow/utils/enums/token_mode.py | 7 +- skyflow/utils/validations/_validations.py | 16 +- skyflow/vault/client/client.py | 16 +- skyflow/vault/controller/_vault.py | 96 +- skyflow/vault/tokens/_detokenize_request.py | 2 - tests/client/test_skyflow.py | 4 +- tests/vault/client/test__client.py | 49 +- tests/vault/controller/test__vault.py | 228 +- 170 files changed, 7150 insertions(+), 12346 deletions(-) delete mode 100644 skyflow/generated/rest/api/__init__.py delete mode 100644 skyflow/generated/rest/api/audit_api.py delete mode 100644 skyflow/generated/rest/api/authentication_api.py delete mode 100644 skyflow/generated/rest/api/bin_lookup_api.py delete mode 100644 skyflow/generated/rest/api/query_api.py delete mode 100644 skyflow/generated/rest/api/records_api.py delete mode 100644 skyflow/generated/rest/api/tokens_api.py delete mode 100644 skyflow/generated/rest/api_client.py delete mode 100644 skyflow/generated/rest/api_response.py create mode 100644 skyflow/generated/rest/audit/__init__.py create mode 100644 skyflow/generated/rest/audit/client.py create mode 100644 skyflow/generated/rest/audit/types/__init__.py create mode 100644 skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_action_type.py create mode 100644 skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_access_type.py create mode 100644 skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_actor_type.py create mode 100644 skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_auth_mode.py create mode 100644 skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_resource_type.py create mode 100644 
skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_sort_ops_order_by.py create mode 100644 skyflow/generated/rest/authentication/__init__.py create mode 100644 skyflow/generated/rest/authentication/client.py create mode 100644 skyflow/generated/rest/bin_lookup/__init__.py create mode 100644 skyflow/generated/rest/bin_lookup/client.py create mode 100644 skyflow/generated/rest/client.py delete mode 100644 skyflow/generated/rest/configuration.py create mode 100644 skyflow/generated/rest/core/__init__.py create mode 100644 skyflow/generated/rest/core/api_error.py create mode 100644 skyflow/generated/rest/core/client_wrapper.py create mode 100644 skyflow/generated/rest/core/datetime_utils.py create mode 100644 skyflow/generated/rest/core/file.py create mode 100644 skyflow/generated/rest/core/http_client.py create mode 100644 skyflow/generated/rest/core/jsonable_encoder.py create mode 100644 skyflow/generated/rest/core/pydantic_utilities.py create mode 100644 skyflow/generated/rest/core/query_encoder.py create mode 100644 skyflow/generated/rest/core/remove_none_from_dict.py create mode 100644 skyflow/generated/rest/core/request_options.py create mode 100644 skyflow/generated/rest/core/serialization.py create mode 100644 skyflow/generated/rest/environment.py create mode 100644 skyflow/generated/rest/errors/__init__.py create mode 100644 skyflow/generated/rest/errors/bad_request_error.py create mode 100644 skyflow/generated/rest/errors/not_found_error.py create mode 100644 skyflow/generated/rest/errors/unauthorized_error.py delete mode 100644 skyflow/generated/rest/exceptions.py delete mode 100644 skyflow/generated/rest/models/__init__.py delete mode 100644 skyflow/generated/rest/models/audit_event_audit_resource_type.py delete mode 100644 skyflow/generated/rest/models/audit_event_context.py delete mode 100644 skyflow/generated/rest/models/audit_event_data.py delete mode 100644 skyflow/generated/rest/models/audit_event_http_info.py delete mode 100644 skyflow/generated/rest/models/batch_record_method.py delete mode 100644 skyflow/generated/rest/models/context_access_type.py delete mode 100644 skyflow/generated/rest/models/context_auth_mode.py delete mode 100644 skyflow/generated/rest/models/detokenize_record_response_value_type.py delete mode 100644 skyflow/generated/rest/models/googlerpc_status.py delete mode 100644 skyflow/generated/rest/models/protobuf_any.py delete mode 100644 skyflow/generated/rest/models/query_service_execute_query_body.py delete mode 100644 skyflow/generated/rest/models/record_service_batch_operation_body.py delete mode 100644 skyflow/generated/rest/models/record_service_bulk_delete_record_body.py delete mode 100644 skyflow/generated/rest/models/record_service_insert_record_body.py delete mode 100644 skyflow/generated/rest/models/record_service_update_record_body.py delete mode 100644 skyflow/generated/rest/models/redaction_enum_redaction.py delete mode 100644 skyflow/generated/rest/models/request_action_type.py delete mode 100644 skyflow/generated/rest/models/v1_audit_after_options.py delete mode 100644 skyflow/generated/rest/models/v1_audit_event_response.py delete mode 100644 skyflow/generated/rest/models/v1_audit_response.py delete mode 100644 skyflow/generated/rest/models/v1_audit_response_event.py delete mode 100644 skyflow/generated/rest/models/v1_audit_response_event_request.py delete mode 100644 skyflow/generated/rest/models/v1_batch_operation_response.py delete mode 100644 skyflow/generated/rest/models/v1_batch_record.py delete mode 100644 
skyflow/generated/rest/models/v1_bin_list_request.py delete mode 100644 skyflow/generated/rest/models/v1_bin_list_response.py delete mode 100644 skyflow/generated/rest/models/v1_bulk_delete_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_bulk_get_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_byot.py delete mode 100644 skyflow/generated/rest/models/v1_card.py delete mode 100644 skyflow/generated/rest/models/v1_delete_file_response.py delete mode 100644 skyflow/generated/rest/models/v1_delete_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_detokenize_payload.py delete mode 100644 skyflow/generated/rest/models/v1_detokenize_record_request.py delete mode 100644 skyflow/generated/rest/models/v1_detokenize_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_detokenize_response.py delete mode 100644 skyflow/generated/rest/models/v1_field_records.py delete mode 100644 skyflow/generated/rest/models/v1_file_av_scan_status.py delete mode 100644 skyflow/generated/rest/models/v1_get_auth_token_request.py delete mode 100644 skyflow/generated/rest/models/v1_get_auth_token_response.py delete mode 100644 skyflow/generated/rest/models/v1_get_file_scan_status_response.py delete mode 100644 skyflow/generated/rest/models/v1_get_query_response.py delete mode 100644 skyflow/generated/rest/models/v1_insert_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_member_type.py delete mode 100644 skyflow/generated/rest/models/v1_record_meta_properties.py delete mode 100644 skyflow/generated/rest/models/v1_tokenize_payload.py delete mode 100644 skyflow/generated/rest/models/v1_tokenize_record_request.py delete mode 100644 skyflow/generated/rest/models/v1_tokenize_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_tokenize_response.py delete mode 100644 skyflow/generated/rest/models/v1_update_record_response.py delete mode 100644 skyflow/generated/rest/models/v1_vault_field_mapping.py delete mode 100644 skyflow/generated/rest/models/v1_vault_schema_config.py create mode 100644 skyflow/generated/rest/query/__init__.py create mode 100644 skyflow/generated/rest/query/client.py create mode 100644 skyflow/generated/rest/records/__init__.py create mode 100644 skyflow/generated/rest/records/client.py create mode 100644 skyflow/generated/rest/records/types/__init__.py create mode 100644 skyflow/generated/rest/records/types/record_service_bulk_get_record_request_order_by.py create mode 100644 skyflow/generated/rest/records/types/record_service_bulk_get_record_request_redaction.py create mode 100644 skyflow/generated/rest/records/types/record_service_get_record_request_redaction.py delete mode 100644 skyflow/generated/rest/rest.py create mode 100644 skyflow/generated/rest/tokens/__init__.py create mode 100644 skyflow/generated/rest/tokens/client.py create mode 100644 skyflow/generated/rest/types/__init__.py create mode 100644 skyflow/generated/rest/types/audit_event_audit_resource_type.py create mode 100644 skyflow/generated/rest/types/audit_event_context.py create mode 100644 skyflow/generated/rest/types/audit_event_data.py create mode 100644 skyflow/generated/rest/types/audit_event_http_info.py create mode 100644 skyflow/generated/rest/types/batch_record_method.py create mode 100644 skyflow/generated/rest/types/context_access_type.py create mode 100644 skyflow/generated/rest/types/context_auth_mode.py create mode 100644 skyflow/generated/rest/types/detokenize_record_response_value_type.py create mode 
100644 skyflow/generated/rest/types/googlerpc_status.py create mode 100644 skyflow/generated/rest/types/protobuf_any.py create mode 100644 skyflow/generated/rest/types/redaction_enum_redaction.py create mode 100644 skyflow/generated/rest/types/request_action_type.py create mode 100644 skyflow/generated/rest/types/v_1_audit_after_options.py create mode 100644 skyflow/generated/rest/types/v_1_audit_event_response.py create mode 100644 skyflow/generated/rest/types/v_1_audit_response.py create mode 100644 skyflow/generated/rest/types/v_1_audit_response_event.py create mode 100644 skyflow/generated/rest/types/v_1_audit_response_event_request.py create mode 100644 skyflow/generated/rest/types/v_1_batch_operation_response.py create mode 100644 skyflow/generated/rest/types/v_1_batch_record.py create mode 100644 skyflow/generated/rest/types/v_1_bin_list_response.py create mode 100644 skyflow/generated/rest/types/v_1_bulk_delete_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_bulk_get_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_byot.py create mode 100644 skyflow/generated/rest/types/v_1_card.py create mode 100644 skyflow/generated/rest/types/v_1_delete_file_response.py create mode 100644 skyflow/generated/rest/types/v_1_delete_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_detokenize_record_request.py create mode 100644 skyflow/generated/rest/types/v_1_detokenize_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_detokenize_response.py create mode 100644 skyflow/generated/rest/types/v_1_field_records.py create mode 100644 skyflow/generated/rest/types/v_1_file_av_scan_status.py create mode 100644 skyflow/generated/rest/types/v_1_get_auth_token_response.py create mode 100644 skyflow/generated/rest/types/v_1_get_file_scan_status_response.py create mode 100644 skyflow/generated/rest/types/v_1_get_query_response.py create mode 100644 skyflow/generated/rest/types/v_1_insert_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_member_type.py create mode 100644 skyflow/generated/rest/types/v_1_record_meta_properties.py create mode 100644 skyflow/generated/rest/types/v_1_tokenize_record_request.py create mode 100644 skyflow/generated/rest/types/v_1_tokenize_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_tokenize_response.py create mode 100644 skyflow/generated/rest/types/v_1_update_record_response.py create mode 100644 skyflow/generated/rest/types/v_1_vault_field_mapping.py create mode 100644 skyflow/generated/rest/types/v_1_vault_schema_config.py create mode 100644 skyflow/generated/rest/version.py create mode 100644 skyflow/utils/constants.py diff --git a/ci-scripts/bump_version.sh b/ci-scripts/bump_version.sh index a770e905..ab79e8aa 100755 --- a/ci-scripts/bump_version.sh +++ b/ci-scripts/bump_version.sh @@ -7,6 +7,7 @@ then sed -E "s/current_version = .+/current_version = '$SEMVER'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$SEMVER'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile + sed -E "s/__version__ = .+/__version__ = '$SEMVER'/g" skyflow/generated/rest/version.py > tempfile && cat tempfile > skyflow/generated/rest/version.py && rm -f tempfile echo -------------------------- echo "Done, Package now at $1" @@ -18,6 +19,7 @@ else sed -E "s/current_version = .+/current_version = '$DEV_VERSION'/g" setup.py > tempfile && cat tempfile > setup.py && rm 
-f tempfile sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$DEV_VERSION'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile + sed -E "s/__version__ = .+/__version__ = '$DEV_VERSION'/g" skyflow/generated/rest/version.py > tempfile && cat tempfile > skyflow/generated/rest/version.py && rm -f tempfile echo -------------------------- echo "Done, Package now at $DEV_VERSION" diff --git a/requirements.txt b/requirements.txt index 46a85940..687bfb9b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,5 @@ PyJWT~=2.9.0 requests~=2.32.3 coverage cryptography -python-dotenv~=1.0.1 \ No newline at end of file +python-dotenv~=1.0.1 +httpx \ No newline at end of file diff --git a/setup.py b/setup.py index 9d68a420..10181764 100644 --- a/setup.py +++ b/setup.py @@ -20,17 +20,18 @@ description='Skyflow SDK for the Python programming language', long_description=open('README.rst').read(), install_requires=[ - 'python_dateutil >= 2.5.3', - 'setuptools >= 21.0.0', - 'urllib3 >= 1.25.3, < 2.1.0', - 'pydantic >= 2', - 'typing-extensions >= 4.7.1', - 'DateTime~=5.5', - 'PyJWT~=2.9.0', - 'requests~=2.32.3', - 'coverage', - 'cryptography', - 'python-dotenv~=1.0.1' -], - python_requires=">=3.8" + 'python_dateutil >= 2.5.3', + 'setuptools >= 21.0.0', + 'urllib3 >= 1.25.3, < 2.1.0', + 'pydantic >= 2', + 'typing-extensions >= 4.7.1', + 'DateTime~=5.5', + 'PyJWT~=2.9.0', + 'requests~=2.32.3', + 'coverage', + 'cryptography', + 'python-dotenv~=1.0.1', + 'httpx' + ], + python_requires=">=3.8", ) diff --git a/skyflow/client/skyflow.py b/skyflow/client/skyflow.py index be3f7d9a..1c87bcaa 100644 --- a/skyflow/client/skyflow.py +++ b/skyflow/client/skyflow.py @@ -2,7 +2,7 @@ from skyflow import LogLevel from skyflow.error import SkyflowError from skyflow.utils import SkyflowMessages -from skyflow.utils.logger import log_info, Logger, log_error +from skyflow.utils.logger import log_info, Logger from skyflow.utils.validations import validate_vault_config, validate_connection_config, validate_update_vault_config, \ validate_update_connection_config, validate_credentials, validate_log_level from skyflow.vault.client.client import VaultClient diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index 1544b853..5cacae7e 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -1,88 +1,133 @@ -# coding: utf-8 +# This file was auto-generated by Fern from our API Definition. 
-# flake8: noqa +from .types import ( + AuditEventAuditResourceType, + AuditEventContext, + AuditEventData, + AuditEventHttpInfo, + BatchRecordMethod, + ContextAccessType, + ContextAuthMode, + DetokenizeRecordResponseValueType, + GooglerpcStatus, + ProtobufAny, + RedactionEnumRedaction, + RequestActionType, + V1AuditAfterOptions, + V1AuditEventResponse, + V1AuditResponse, + V1AuditResponseEvent, + V1AuditResponseEventRequest, + V1BatchOperationResponse, + V1BatchRecord, + V1BinListResponse, + V1BulkDeleteRecordResponse, + V1BulkGetRecordResponse, + V1Byot, + V1Card, + V1DeleteFileResponse, + V1DeleteRecordResponse, + V1DetokenizeRecordRequest, + V1DetokenizeRecordResponse, + V1DetokenizeResponse, + V1FieldRecords, + V1FileAvScanStatus, + V1GetAuthTokenResponse, + V1GetFileScanStatusResponse, + V1GetQueryResponse, + V1InsertRecordResponse, + V1MemberType, + V1RecordMetaProperties, + V1TokenizeRecordRequest, + V1TokenizeRecordResponse, + V1TokenizeResponse, + V1UpdateRecordResponse, + V1VaultFieldMapping, + V1VaultSchemaConfig, +) +from .errors import BadRequestError, NotFoundError, UnauthorizedError +from . import audit, authentication, bin_lookup, query, records, tokens +from .audit import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, + AuditServiceListAuditEventsRequestFilterOpsContextAccessType, + AuditServiceListAuditEventsRequestFilterOpsContextActorType, + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, + AuditServiceListAuditEventsRequestFilterOpsResourceType, + AuditServiceListAuditEventsRequestSortOpsOrderBy, +) +from .client import AsyncSkyflow, Skyflow +from .environment import SkyflowEnvironment +from .records import ( + RecordServiceBulkGetRecordRequestOrderBy, + RecordServiceBulkGetRecordRequestRedaction, + RecordServiceGetRecordRequestRedaction, +) +from .version import __version__ -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
 When you make an API call, you need to add a header:
 Header | Value | Example
 Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -__version__ = "1.0.0" - -# import apis into sdk package -from skyflow.generated.rest.api.audit_api import AuditApi -from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi -from skyflow.generated.rest.api.query_api import QueryApi -from skyflow.generated.rest.api.records_api import RecordsApi -from skyflow.generated.rest.api.tokens_api import TokensApi - -# import ApiClient -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.api_client import ApiClient -from skyflow.generated.rest.configuration import Configuration -from skyflow.generated.rest.exceptions import OpenApiException -from skyflow.generated.rest.exceptions import ApiTypeError -from skyflow.generated.rest.exceptions import ApiValueError -from skyflow.generated.rest.exceptions import ApiKeyError -from skyflow.generated.rest.exceptions import ApiAttributeError -from skyflow.generated.rest.exceptions import ApiException - -# import models into sdk package -from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType -from skyflow.generated.rest.models.audit_event_context import AuditEventContext -from skyflow.generated.rest.models.audit_event_data import AuditEventData -from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo -from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod -from skyflow.generated.rest.models.context_access_type import ContextAccessType -from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode -from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType -from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus -from skyflow.generated.rest.models.protobuf_any import ProtobufAny -from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody -from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody -from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody -from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody -from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody -from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION -from skyflow.generated.rest.models.request_action_type import RequestActionType -from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions -from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse -from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse -from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent -from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest -from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest -from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse -from skyflow.generated.rest.models.v1_byot import V1BYOT -from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse -from skyflow.generated.rest.models.v1_batch_record import 
V1BatchRecord -from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse -from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse -from skyflow.generated.rest.models.v1_card import V1Card -from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse -from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse -from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload -from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest -from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse -from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus -from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse -from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse -from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse -from skyflow.generated.rest.models.v1_member_type import V1MemberType -from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties -from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload -from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest -from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse -from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse -from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse -from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping -from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig +__all__ = [ + "AsyncSkyflow", + "AuditEventAuditResourceType", + "AuditEventContext", + "AuditEventData", + "AuditEventHttpInfo", + "AuditServiceListAuditEventsRequestFilterOpsActionType", + "AuditServiceListAuditEventsRequestFilterOpsContextAccessType", + "AuditServiceListAuditEventsRequestFilterOpsContextActorType", + "AuditServiceListAuditEventsRequestFilterOpsContextAuthMode", + "AuditServiceListAuditEventsRequestFilterOpsResourceType", + "AuditServiceListAuditEventsRequestSortOpsOrderBy", + "BadRequestError", + "BatchRecordMethod", + "ContextAccessType", + "ContextAuthMode", + "DetokenizeRecordResponseValueType", + "GooglerpcStatus", + "NotFoundError", + "ProtobufAny", + "RecordServiceBulkGetRecordRequestOrderBy", + "RecordServiceBulkGetRecordRequestRedaction", + "RecordServiceGetRecordRequestRedaction", + "RedactionEnumRedaction", + "RequestActionType", + "Skyflow", + "SkyflowEnvironment", + "UnauthorizedError", + "V1AuditAfterOptions", + "V1AuditEventResponse", + "V1AuditResponse", + "V1AuditResponseEvent", + "V1AuditResponseEventRequest", + "V1BatchOperationResponse", + "V1BatchRecord", + "V1BinListResponse", + "V1BulkDeleteRecordResponse", + "V1BulkGetRecordResponse", + "V1Byot", + "V1Card", + "V1DeleteFileResponse", + "V1DeleteRecordResponse", + "V1DetokenizeRecordRequest", + "V1DetokenizeRecordResponse", + "V1DetokenizeResponse", + "V1FieldRecords", + "V1FileAvScanStatus", + "V1GetAuthTokenResponse", + "V1GetFileScanStatusResponse", + "V1GetQueryResponse", + 
"V1InsertRecordResponse", + "V1MemberType", + "V1RecordMetaProperties", + "V1TokenizeRecordRequest", + "V1TokenizeRecordResponse", + "V1TokenizeResponse", + "V1UpdateRecordResponse", + "V1VaultFieldMapping", + "V1VaultSchemaConfig", + "__version__", + "audit", + "authentication", + "bin_lookup", + "query", + "records", + "tokens", +] diff --git a/skyflow/generated/rest/api/__init__.py b/skyflow/generated/rest/api/__init__.py deleted file mode 100644 index 01b15fdb..00000000 --- a/skyflow/generated/rest/api/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# flake8: noqa - -# import apis into api package -from skyflow.generated.rest.api.audit_api import AuditApi -from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi -from skyflow.generated.rest.api.query_api import QueryApi -from skyflow.generated.rest.api.records_api import RecordsApi -from skyflow.generated.rest.api.tokens_api import TokensApi -from skyflow.generated.rest.api.authentication_api import AuthenticationApi diff --git a/skyflow/generated/rest/api/audit_api.py b/skyflow/generated/rest/api/audit_api.py deleted file mode 100644 index dc6de1fe..00000000 --- a/skyflow/generated/rest/api/audit_api.py +++ /dev/null @@ -1,848 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
 When you make an API call, you need to add a header:
 Header | Value | Example
 Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field, StrictInt, StrictStr, field_validator -from typing import Optional -from typing_extensions import Annotated -from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class AuditApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def audit_service_list_audit_events( - self, - filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], - filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, - filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, - filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, - filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, - filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, - filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, - filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, - filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, - filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, - filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, - filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, - filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, - filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, - filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, - filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, - filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. 
Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, - filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, - filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, - filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\".")] = None, - filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, - filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, - filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, - filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, - filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, - filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, - filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, - sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, - sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, - after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, - offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1AuditResponse: - """List Audit Events - - Lists audit events that match query parameters. - - :param filter_ops_account_id: Resources with the specified account ID. (required) - :type filter_ops_account_id: str - :param filter_ops_context_change_id: ID for the audit event. - :type filter_ops_context_change_id: str - :param filter_ops_context_request_id: ID for the request that caused the event. - :type filter_ops_context_request_id: str - :param filter_ops_context_trace_id: ID for the request set by the service that received the request. 
- :type filter_ops_context_trace_id: str - :param filter_ops_context_session_id: ID for the session in which the request was sent. - :type filter_ops_context_session_id: str - :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. - :type filter_ops_context_actor: str - :param filter_ops_context_actor_type: Type of member who sent the request. - :type filter_ops_context_actor_type: str - :param filter_ops_context_access_type: Type of access for the request. - :type filter_ops_context_access_type: str - :param filter_ops_context_ip_address: IP Address of the client that made the request. - :type filter_ops_context_ip_address: str - :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. - :type filter_ops_context_origin: str - :param filter_ops_context_auth_mode: Authentication mode the `actor` used. - :type filter_ops_context_auth_mode: str - :param filter_ops_context_jwt_id: ID of the JWT token. - :type filter_ops_context_jwt_id: str - :param filter_ops_context_bearer_token_context_id: Embedded User Context. - :type filter_ops_context_bearer_token_context_id: str - :param filter_ops_parent_account_id: Resources with the specified parent account ID. - :type filter_ops_parent_account_id: str - :param filter_ops_workspace_id: Resources with the specified workspace ID. - :type filter_ops_workspace_id: str - :param filter_ops_vault_id: Resources with the specified vault ID. - :type filter_ops_vault_id: str - :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". - :type filter_ops_resource_ids: str - :param filter_ops_action_type: Events with the specified action type. - :type filter_ops_action_type: str - :param filter_ops_resource_type: Resources with the specified type. - :type filter_ops_resource_type: str - :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". - :type filter_ops_tags: str - :param filter_ops_response_code: HTTP response code of the request. - :type filter_ops_response_code: int - :param filter_ops_start_time: Start timestamp for the query, in SQL format. - :type filter_ops_start_time: str - :param filter_ops_end_time: End timestamp for the query, in SQL format. - :type filter_ops_end_time: str - :param filter_ops_api_name: Name of the API called in the request. - :type filter_ops_api_name: str - :param filter_ops_response_message: Response message of the request. - :type filter_ops_response_message: str - :param filter_ops_http_method: HTTP method of the request. - :type filter_ops_http_method: str - :param filter_ops_http_uri: HTTP URI of the request. - :type filter_ops_http_uri: str - :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). - :type sort_ops_sort_by: str - :param sort_ops_order_by: Ascending or descending ordering of results. - :type sort_ops_order_by: str - :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. 
- :type after_ops_timestamp: str - :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. - :type after_ops_change_id: str - :param limit: Number of results to return. - :type limit: int - :param offset: Record position at which to start returning results. - :type offset: int - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._audit_service_list_audit_events_serialize( - filter_ops_account_id=filter_ops_account_id, - filter_ops_context_change_id=filter_ops_context_change_id, - filter_ops_context_request_id=filter_ops_context_request_id, - filter_ops_context_trace_id=filter_ops_context_trace_id, - filter_ops_context_session_id=filter_ops_context_session_id, - filter_ops_context_actor=filter_ops_context_actor, - filter_ops_context_actor_type=filter_ops_context_actor_type, - filter_ops_context_access_type=filter_ops_context_access_type, - filter_ops_context_ip_address=filter_ops_context_ip_address, - filter_ops_context_origin=filter_ops_context_origin, - filter_ops_context_auth_mode=filter_ops_context_auth_mode, - filter_ops_context_jwt_id=filter_ops_context_jwt_id, - filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, - filter_ops_parent_account_id=filter_ops_parent_account_id, - filter_ops_workspace_id=filter_ops_workspace_id, - filter_ops_vault_id=filter_ops_vault_id, - filter_ops_resource_ids=filter_ops_resource_ids, - filter_ops_action_type=filter_ops_action_type, - filter_ops_resource_type=filter_ops_resource_type, - filter_ops_tags=filter_ops_tags, - filter_ops_response_code=filter_ops_response_code, - filter_ops_start_time=filter_ops_start_time, - filter_ops_end_time=filter_ops_end_time, - filter_ops_api_name=filter_ops_api_name, - filter_ops_response_message=filter_ops_response_message, - filter_ops_http_method=filter_ops_http_method, - filter_ops_http_uri=filter_ops_http_uri, - sort_ops_sort_by=sort_ops_sort_by, - sort_ops_order_by=sort_ops_order_by, - after_ops_timestamp=after_ops_timestamp, - after_ops_change_id=after_ops_change_id, - limit=limit, - offset=offset, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1AuditResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return 
self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def audit_service_list_audit_events_with_http_info( - self, - filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], - filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, - filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, - filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, - filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, - filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, - filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, - filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, - filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, - filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, - filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, - filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, - filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, - filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, - filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, - filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, - filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, - filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, - filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, - filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. 
For example, \"login, get\".")] = None, - filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, - filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, - filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, - filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, - filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, - filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, - filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, - sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, - sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, - after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, - offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1AuditResponse]: - """List Audit Events - - Lists audit events that match query parameters. - - :param filter_ops_account_id: Resources with the specified account ID. (required) - :type filter_ops_account_id: str - :param filter_ops_context_change_id: ID for the audit event. - :type filter_ops_context_change_id: str - :param filter_ops_context_request_id: ID for the request that caused the event. - :type filter_ops_context_request_id: str - :param filter_ops_context_trace_id: ID for the request set by the service that received the request. - :type filter_ops_context_trace_id: str - :param filter_ops_context_session_id: ID for the session in which the request was sent. - :type filter_ops_context_session_id: str - :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. - :type filter_ops_context_actor: str - :param filter_ops_context_actor_type: Type of member who sent the request. 
- :type filter_ops_context_actor_type: str - :param filter_ops_context_access_type: Type of access for the request. - :type filter_ops_context_access_type: str - :param filter_ops_context_ip_address: IP Address of the client that made the request. - :type filter_ops_context_ip_address: str - :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. - :type filter_ops_context_origin: str - :param filter_ops_context_auth_mode: Authentication mode the `actor` used. - :type filter_ops_context_auth_mode: str - :param filter_ops_context_jwt_id: ID of the JWT token. - :type filter_ops_context_jwt_id: str - :param filter_ops_context_bearer_token_context_id: Embedded User Context. - :type filter_ops_context_bearer_token_context_id: str - :param filter_ops_parent_account_id: Resources with the specified parent account ID. - :type filter_ops_parent_account_id: str - :param filter_ops_workspace_id: Resources with the specified workspace ID. - :type filter_ops_workspace_id: str - :param filter_ops_vault_id: Resources with the specified vault ID. - :type filter_ops_vault_id: str - :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". - :type filter_ops_resource_ids: str - :param filter_ops_action_type: Events with the specified action type. - :type filter_ops_action_type: str - :param filter_ops_resource_type: Resources with the specified type. - :type filter_ops_resource_type: str - :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". - :type filter_ops_tags: str - :param filter_ops_response_code: HTTP response code of the request. - :type filter_ops_response_code: int - :param filter_ops_start_time: Start timestamp for the query, in SQL format. - :type filter_ops_start_time: str - :param filter_ops_end_time: End timestamp for the query, in SQL format. - :type filter_ops_end_time: str - :param filter_ops_api_name: Name of the API called in the request. - :type filter_ops_api_name: str - :param filter_ops_response_message: Response message of the request. - :type filter_ops_response_message: str - :param filter_ops_http_method: HTTP method of the request. - :type filter_ops_http_method: str - :param filter_ops_http_uri: HTTP URI of the request. - :type filter_ops_http_uri: str - :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). - :type sort_ops_sort_by: str - :param sort_ops_order_by: Ascending or descending ordering of results. - :type sort_ops_order_by: str - :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. - :type after_ops_timestamp: str - :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. - :type after_ops_change_id: str - :param limit: Number of results to return. - :type limit: int - :param offset: Record position at which to start returning results. 
- :type offset: int - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._audit_service_list_audit_events_serialize( - filter_ops_account_id=filter_ops_account_id, - filter_ops_context_change_id=filter_ops_context_change_id, - filter_ops_context_request_id=filter_ops_context_request_id, - filter_ops_context_trace_id=filter_ops_context_trace_id, - filter_ops_context_session_id=filter_ops_context_session_id, - filter_ops_context_actor=filter_ops_context_actor, - filter_ops_context_actor_type=filter_ops_context_actor_type, - filter_ops_context_access_type=filter_ops_context_access_type, - filter_ops_context_ip_address=filter_ops_context_ip_address, - filter_ops_context_origin=filter_ops_context_origin, - filter_ops_context_auth_mode=filter_ops_context_auth_mode, - filter_ops_context_jwt_id=filter_ops_context_jwt_id, - filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, - filter_ops_parent_account_id=filter_ops_parent_account_id, - filter_ops_workspace_id=filter_ops_workspace_id, - filter_ops_vault_id=filter_ops_vault_id, - filter_ops_resource_ids=filter_ops_resource_ids, - filter_ops_action_type=filter_ops_action_type, - filter_ops_resource_type=filter_ops_resource_type, - filter_ops_tags=filter_ops_tags, - filter_ops_response_code=filter_ops_response_code, - filter_ops_start_time=filter_ops_start_time, - filter_ops_end_time=filter_ops_end_time, - filter_ops_api_name=filter_ops_api_name, - filter_ops_response_message=filter_ops_response_message, - filter_ops_http_method=filter_ops_http_method, - filter_ops_http_uri=filter_ops_http_uri, - sort_ops_sort_by=sort_ops_sort_by, - sort_ops_order_by=sort_ops_order_by, - after_ops_timestamp=after_ops_timestamp, - after_ops_change_id=after_ops_change_id, - limit=limit, - offset=offset, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1AuditResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def audit_service_list_audit_events_without_preload_content( - self, - filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], - filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, - filter_ops_context_request_id: 
Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, - filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, - filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, - filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.")] = None, - filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, - filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, - filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, - filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, - filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, - filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, - filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, - filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, - filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, - filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, - filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\".")] = None, - filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, - filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, - filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. 
For example, \"login, get\".")] = None, - filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, - filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, - filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, - filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, - filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, - filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, - filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, - sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, - sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, - after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, - limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, - offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """List Audit Events - - Lists audit events that match query parameters. - - :param filter_ops_account_id: Resources with the specified account ID. (required) - :type filter_ops_account_id: str - :param filter_ops_context_change_id: ID for the audit event. - :type filter_ops_context_change_id: str - :param filter_ops_context_request_id: ID for the request that caused the event. - :type filter_ops_context_request_id: str - :param filter_ops_context_trace_id: ID for the request set by the service that received the request. - :type filter_ops_context_trace_id: str - :param filter_ops_context_session_id: ID for the session in which the request was sent. - :type filter_ops_context_session_id: str - :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. - :type filter_ops_context_actor: str - :param filter_ops_context_actor_type: Type of member who sent the request. 
- :type filter_ops_context_actor_type: str - :param filter_ops_context_access_type: Type of access for the request. - :type filter_ops_context_access_type: str - :param filter_ops_context_ip_address: IP Address of the client that made the request. - :type filter_ops_context_ip_address: str - :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. - :type filter_ops_context_origin: str - :param filter_ops_context_auth_mode: Authentication mode the `actor` used. - :type filter_ops_context_auth_mode: str - :param filter_ops_context_jwt_id: ID of the JWT token. - :type filter_ops_context_jwt_id: str - :param filter_ops_context_bearer_token_context_id: Embedded User Context. - :type filter_ops_context_bearer_token_context_id: str - :param filter_ops_parent_account_id: Resources with the specified parent account ID. - :type filter_ops_parent_account_id: str - :param filter_ops_workspace_id: Resources with the specified workspace ID. - :type filter_ops_workspace_id: str - :param filter_ops_vault_id: Resources with the specified vault ID. - :type filter_ops_vault_id: str - :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". - :type filter_ops_resource_ids: str - :param filter_ops_action_type: Events with the specified action type. - :type filter_ops_action_type: str - :param filter_ops_resource_type: Resources with the specified type. - :type filter_ops_resource_type: str - :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". - :type filter_ops_tags: str - :param filter_ops_response_code: HTTP response code of the request. - :type filter_ops_response_code: int - :param filter_ops_start_time: Start timestamp for the query, in SQL format. - :type filter_ops_start_time: str - :param filter_ops_end_time: End timestamp for the query, in SQL format. - :type filter_ops_end_time: str - :param filter_ops_api_name: Name of the API called in the request. - :type filter_ops_api_name: str - :param filter_ops_response_message: Response message of the request. - :type filter_ops_response_message: str - :param filter_ops_http_method: HTTP method of the request. - :type filter_ops_http_method: str - :param filter_ops_http_uri: HTTP URI of the request. - :type filter_ops_http_uri: str - :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). - :type sort_ops_sort_by: str - :param sort_ops_order_by: Ascending or descending ordering of results. - :type sort_ops_order_by: str - :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. - :type after_ops_timestamp: str - :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. - :type after_ops_change_id: str - :param limit: Number of results to return. - :type limit: int - :param offset: Record position at which to start returning results. 
- :type offset: int - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._audit_service_list_audit_events_serialize( - filter_ops_account_id=filter_ops_account_id, - filter_ops_context_change_id=filter_ops_context_change_id, - filter_ops_context_request_id=filter_ops_context_request_id, - filter_ops_context_trace_id=filter_ops_context_trace_id, - filter_ops_context_session_id=filter_ops_context_session_id, - filter_ops_context_actor=filter_ops_context_actor, - filter_ops_context_actor_type=filter_ops_context_actor_type, - filter_ops_context_access_type=filter_ops_context_access_type, - filter_ops_context_ip_address=filter_ops_context_ip_address, - filter_ops_context_origin=filter_ops_context_origin, - filter_ops_context_auth_mode=filter_ops_context_auth_mode, - filter_ops_context_jwt_id=filter_ops_context_jwt_id, - filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, - filter_ops_parent_account_id=filter_ops_parent_account_id, - filter_ops_workspace_id=filter_ops_workspace_id, - filter_ops_vault_id=filter_ops_vault_id, - filter_ops_resource_ids=filter_ops_resource_ids, - filter_ops_action_type=filter_ops_action_type, - filter_ops_resource_type=filter_ops_resource_type, - filter_ops_tags=filter_ops_tags, - filter_ops_response_code=filter_ops_response_code, - filter_ops_start_time=filter_ops_start_time, - filter_ops_end_time=filter_ops_end_time, - filter_ops_api_name=filter_ops_api_name, - filter_ops_response_message=filter_ops_response_message, - filter_ops_http_method=filter_ops_http_method, - filter_ops_http_uri=filter_ops_http_uri, - sort_ops_sort_by=sort_ops_sort_by, - sort_ops_order_by=sort_ops_order_by, - after_ops_timestamp=after_ops_timestamp, - after_ops_change_id=after_ops_change_id, - limit=limit, - offset=offset, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1AuditResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _audit_service_list_audit_events_serialize( - self, - filter_ops_account_id, - filter_ops_context_change_id, - filter_ops_context_request_id, - filter_ops_context_trace_id, - filter_ops_context_session_id, - filter_ops_context_actor, - filter_ops_context_actor_type, - filter_ops_context_access_type, - filter_ops_context_ip_address, - filter_ops_context_origin, - filter_ops_context_auth_mode, - filter_ops_context_jwt_id, - filter_ops_context_bearer_token_context_id, - 
filter_ops_parent_account_id, - filter_ops_workspace_id, - filter_ops_vault_id, - filter_ops_resource_ids, - filter_ops_action_type, - filter_ops_resource_type, - filter_ops_tags, - filter_ops_response_code, - filter_ops_start_time, - filter_ops_end_time, - filter_ops_api_name, - filter_ops_response_message, - filter_ops_http_method, - filter_ops_http_uri, - sort_ops_sort_by, - sort_ops_order_by, - after_ops_timestamp, - after_ops_change_id, - limit, - offset, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - # process the query parameters - if filter_ops_context_change_id is not None: - - _query_params.append(('filterOps.context.changeID', filter_ops_context_change_id)) - - if filter_ops_context_request_id is not None: - - _query_params.append(('filterOps.context.requestID', filter_ops_context_request_id)) - - if filter_ops_context_trace_id is not None: - - _query_params.append(('filterOps.context.traceID', filter_ops_context_trace_id)) - - if filter_ops_context_session_id is not None: - - _query_params.append(('filterOps.context.sessionID', filter_ops_context_session_id)) - - if filter_ops_context_actor is not None: - - _query_params.append(('filterOps.context.actor', filter_ops_context_actor)) - - if filter_ops_context_actor_type is not None: - - _query_params.append(('filterOps.context.actorType', filter_ops_context_actor_type)) - - if filter_ops_context_access_type is not None: - - _query_params.append(('filterOps.context.accessType', filter_ops_context_access_type)) - - if filter_ops_context_ip_address is not None: - - _query_params.append(('filterOps.context.ipAddress', filter_ops_context_ip_address)) - - if filter_ops_context_origin is not None: - - _query_params.append(('filterOps.context.origin', filter_ops_context_origin)) - - if filter_ops_context_auth_mode is not None: - - _query_params.append(('filterOps.context.authMode', filter_ops_context_auth_mode)) - - if filter_ops_context_jwt_id is not None: - - _query_params.append(('filterOps.context.jwtID', filter_ops_context_jwt_id)) - - if filter_ops_context_bearer_token_context_id is not None: - - _query_params.append(('filterOps.context.bearerTokenContextID', filter_ops_context_bearer_token_context_id)) - - if filter_ops_parent_account_id is not None: - - _query_params.append(('filterOps.parentAccountID', filter_ops_parent_account_id)) - - if filter_ops_account_id is not None: - - _query_params.append(('filterOps.accountID', filter_ops_account_id)) - - if filter_ops_workspace_id is not None: - - _query_params.append(('filterOps.workspaceID', filter_ops_workspace_id)) - - if filter_ops_vault_id is not None: - - _query_params.append(('filterOps.vaultID', filter_ops_vault_id)) - - if filter_ops_resource_ids is not None: - - _query_params.append(('filterOps.resourceIDs', filter_ops_resource_ids)) - - if filter_ops_action_type is not None: - - _query_params.append(('filterOps.actionType', filter_ops_action_type)) - - if filter_ops_resource_type is not None: - - _query_params.append(('filterOps.resourceType', filter_ops_resource_type)) - - if filter_ops_tags is not None: - - _query_params.append(('filterOps.tags', filter_ops_tags)) - - 
if filter_ops_response_code is not None: - - _query_params.append(('filterOps.responseCode', filter_ops_response_code)) - - if filter_ops_start_time is not None: - - _query_params.append(('filterOps.startTime', filter_ops_start_time)) - - if filter_ops_end_time is not None: - - _query_params.append(('filterOps.endTime', filter_ops_end_time)) - - if filter_ops_api_name is not None: - - _query_params.append(('filterOps.apiName', filter_ops_api_name)) - - if filter_ops_response_message is not None: - - _query_params.append(('filterOps.responseMessage', filter_ops_response_message)) - - if filter_ops_http_method is not None: - - _query_params.append(('filterOps.httpMethod', filter_ops_http_method)) - - if filter_ops_http_uri is not None: - - _query_params.append(('filterOps.httpURI', filter_ops_http_uri)) - - if sort_ops_sort_by is not None: - - _query_params.append(('sortOps.sortBy', sort_ops_sort_by)) - - if sort_ops_order_by is not None: - - _query_params.append(('sortOps.orderBy', sort_ops_order_by)) - - if after_ops_timestamp is not None: - - _query_params.append(('afterOps.timestamp', after_ops_timestamp)) - - if after_ops_change_id is not None: - - _query_params.append(('afterOps.changeID', after_ops_change_id)) - - if limit is not None: - - _query_params.append(('limit', limit)) - - if offset is not None: - - _query_params.append(('offset', offset)) - - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/v1/audit/events', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/skyflow/generated/rest/api/authentication_api.py b/skyflow/generated/rest/api/authentication_api.py deleted file mode 100644 index 8abbbf67..00000000 --- a/skyflow/generated/rest/api/authentication_api.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Management API - - # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
Header               | Value                                    | Example
Authorization        | A Bearer Token. See API Authentication.  | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
X-SKYFLOW-ACCOUNT-ID | Your Skyflow account ID.                 | X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
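For reviewers skimming this header while the module is removed, a minimal sketch of how those two headers sit on a raw request, assuming the production base URI; the bearer token, account ID, and the /v1/vaults path are placeholders rather than values taken from this patch, and normal usage goes through the generated ApiClient instead.

    # Illustrative only: the Authorization and X-SKYFLOW-ACCOUNT-ID headers
    # described above, shown with the `requests` library. All values are placeholders.
    import requests

    MANAGEMENT_BASE_URI = "https://manage.skyflowapis.com"  # production base URI listed above

    headers = {
        "Authorization": "Bearer <bearer_token>",        # placeholder bearer token
        "X-SKYFLOW-ACCOUNT-ID": "<skyflow_account_id>",  # placeholder account ID
    }

    # The path below is hypothetical and only demonstrates the header scheme.
    response = requests.get(f"{MANAGEMENT_BASE_URI}/v1/vaults", headers=headers)
    print(response.status_code)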
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest -from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class AuthenticationApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def authentication_service_get_auth_token( - self, - body: V1GetAuthTokenRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1GetAuthTokenResponse: - """Get Bearer Token - -

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.
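As a point of reference for this removal, a minimal sketch of driving the generated class directly, using only names that appear in this module (ApiClient, AuthenticationApi, V1GetAuthTokenRequest); the request model's fields are not shown in this diff, so none are set here.

    # Sketch only: exercising the removed generated client, not the SDK's
    # public interface. Assumes ApiClient's default instance is configured
    # for the Management API base URI shown above.
    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.authentication_api import AuthenticationApi
    from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest

    auth_api = AuthenticationApi(ApiClient.get_default())

    # V1GetAuthTokenRequest's fields do not appear in this diff, so the body is
    # left unset here; a real call would populate the required credential fields.
    body = V1GetAuthTokenRequest()

    # Returns a deserialized V1GetAuthTokenResponse; the *_with_http_info and
    # *_without_preload_content variants below wrap the same serializer.
    token_response = auth_api.authentication_service_get_auth_token(body)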

- - :param body: (required) - :type body: V1GetAuthTokenRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._authentication_service_get_auth_token_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetAuthTokenResponse", - '400': "object", - '401': "object", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def authentication_service_get_auth_token_with_http_info( - self, - body: V1GetAuthTokenRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1GetAuthTokenResponse]: - """Get Bearer Token - -

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

- - :param body: (required) - :type body: V1GetAuthTokenRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._authentication_service_get_auth_token_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetAuthTokenResponse", - '400': "object", - '401': "object", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def authentication_service_get_auth_token_without_preload_content( - self, - body: V1GetAuthTokenRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get Bearer Token - -

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

- - :param body: (required) - :type body: V1GetAuthTokenRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._authentication_service_get_auth_token_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetAuthTokenResponse", - '400': "object", - '401': "object", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _authentication_service_get_auth_token_serialize( - self, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/auth/sa/oauth/token', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/skyflow/generated/rest/api/bin_lookup_api.py b/skyflow/generated/rest/api/bin_lookup_api.py deleted file mode 100644 index 1bb3e64b..00000000 --- a/skyflow/generated/rest/api/bin_lookup_api.py +++ /dev/null @@ -1,315 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. 
The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header        | Value                                   | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
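The BIN lookup module deleted below follows the same three-variant pattern as the other generated APIs; a minimal sketch of its primary call, noting that V1BINListRequest's fields are not visible in this diff and are therefore left unset.

    # Illustrative call against the removed BINLookupApi; assumes an ApiClient
    # configured for the Data API base URI shown above.
    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi
    from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest

    bin_api = BINLookupApi(ApiClient.get_default())

    # V1BINListRequest's fields are not part of this section, so none are set.
    request = V1BINListRequest()

    # POSTs to /v1/card_lookup and returns a V1BINListResponse with card metadata.
    card_metadata = bin_api.b_in_list_service_list_cards_of_bin(request)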
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field -from typing_extensions import Annotated -from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest -from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class BINLookupApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def b_in_list_service_list_cards_of_bin( - self, - body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1BINListResponse: - """Get BIN - - Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. - - :param body: Request to return specific card metadata. (required) - :type body: V1BINListRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._b_in_list_service_list_cards_of_bin_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BINListResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def b_in_list_service_list_cards_of_bin_with_http_info( - self, - body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1BINListResponse]: - """Get BIN - - Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. - - :param body: Request to return specific card metadata. (required) - :type body: V1BINListRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._b_in_list_service_list_cards_of_bin_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BINListResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def b_in_list_service_list_cards_of_bin_without_preload_content( - self, - body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get BIN - - Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. - - :param body: Request to return specific card metadata. (required) - :type body: V1BINListRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._b_in_list_service_list_cards_of_bin_serialize( - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BINListResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _b_in_list_service_list_cards_of_bin_serialize( - self, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/card_lookup', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/skyflow/generated/rest/api/query_api.py b/skyflow/generated/rest/api/query_api.py deleted file mode 100644 index edf04f27..00000000 --- a/skyflow/generated/rest/api/query_api.py +++ /dev/null @@ -1,330 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. 
The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header        | Value                                   | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field, StrictStr -from typing_extensions import Annotated -from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody -from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class QueryApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def query_service_execute_query( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: QueryServiceExecuteQueryBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1GetQueryResponse: - """Execute Query - - Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
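Given the constraints listed above (SELECT only, at most 25 records per call, OFFSET for further pages), a read through the removed generated client might look like the sketch below; the `query` field name on QueryServiceExecuteQueryBody, the vault ID, and the table name are assumptions, since the model's fields do not appear in this section.

    # Sketch of one page of a SELECT through the removed QueryApi. The vault ID,
    # table name, and the body's `query` field name are assumptions, not values
    # taken from this patch.
    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.query_api import QueryApi
    from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody

    query_api = QueryApi(ApiClient.get_default())
    vault_id = "<vault_id>"  # placeholder

    offset = 0
    body = QueryServiceExecuteQueryBody(query=f"SELECT * FROM persons OFFSET {offset}")
    page = query_api.query_service_execute_query(vault_id, body)  # V1GetQueryResponse

    # Each call returns at most 25 records; to continue, re-issue the query
    # with a larger OFFSET rather than expecting server-side cursors.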
    • - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: QueryServiceExecuteQueryBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._query_service_execute_query_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetQueryResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def query_service_execute_query_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: QueryServiceExecuteQueryBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1GetQueryResponse]: - """Execute Query - - Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
        • - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: QueryServiceExecuteQueryBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._query_service_execute_query_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetQueryResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def query_service_execute_query_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: QueryServiceExecuteQueryBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Execute Query - - Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
            • - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: QueryServiceExecuteQueryBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._query_service_execute_query_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetQueryResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _query_service_execute_query_serialize( - self, - vault_id, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}/query', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/skyflow/generated/rest/api/records_api.py b/skyflow/generated/rest/api/records_api.py deleted file mode 100644 index ae9a2c29..00000000 --- a/skyflow/generated/rest/api/records_api.py +++ /dev/null @@ -1,3310 +0,0 @@ -# coding: utf-8 - -""" - 
Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header        | Value                                   | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
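The records module removed below is by far the largest of the set. As one concrete reference point, its file_service_delete_file method (defined immediately after this header) takes the vault, table, record, and column identifiers as path parameters; a minimal sketch, with every identifier a placeholder.

    # Illustrative call to the removed RecordsApi.file_service_delete_file.
    # Assumes an ApiClient configured for the Data API; all IDs are placeholders.
    from skyflow.generated.rest.api_client import ApiClient
    from skyflow.generated.rest.api.records_api import RecordsApi

    records_api = RecordsApi(ApiClient.get_default())

    # DELETE /v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}
    delete_response = records_api.file_service_delete_file(
        vault_id="<vault_id>",
        table_name="<table_name>",
        id="<skyflow_id>",
        column_name="<file_column>",
    )  # returns V1DeleteFileResponse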
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field, StrictBool, StrictBytes, StrictStr, field_validator -from typing import List, Optional, Union -from typing_extensions import Annotated -from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody -from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody -from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody -from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody -from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse -from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse -from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse -from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse -from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse -from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse -from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class RecordsApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def file_service_delete_file( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1DeleteFileResponse: - """Delete File - - Deletes a file from the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. 
(required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_delete_file_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteFileResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def file_service_delete_file_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1DeleteFileResponse]: - """Delete File - - Deletes a file from the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. (required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. 
- :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_delete_file_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteFileResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def file_service_delete_file_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Delete File - - Deletes a file from the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. (required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._file_service_delete_file_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteFileResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _file_service_delete_file_serialize( - self, - vault_id, - table_name, - id, - column_name, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if table_name is not None: - _path_params['tableName'] = table_name - if id is not None: - _path_params['ID'] = id - if column_name is not None: - _path_params['columnName'] = column_name - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='DELETE', - resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def file_service_get_file_scan_status( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1GetFileScanStatusResponse: - """Get File Scan Status - - Returns the anti-virus scan status of a file. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. (required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_get_file_scan_status_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetFileScanStatusResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def file_service_get_file_scan_status_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1GetFileScanStatusResponse]: - """Get File Scan Status - - Returns the anti-virus scan status of a file. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. (required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. 
- :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_get_file_scan_status_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetFileScanStatusResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def file_service_get_file_scan_status_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - table_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get File Scan Status - - Returns the anti-virus scan status of a file. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param table_name: Name of the table. (required) - :type table_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param column_name: Name of the column that contains the file. (required) - :type column_name: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._file_service_get_file_scan_status_serialize( - vault_id=vault_id, - table_name=table_name, - id=id, - column_name=column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1GetFileScanStatusResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _file_service_get_file_scan_status_serialize( - self, - vault_id, - table_name, - id, - column_name, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if table_name is not None: - _path_params['tableName'] = table_name - if id is not None: - _path_params['ID'] = id - if column_name is not None: - _path_params['columnName'] = column_name - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}/scan-status', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def file_service_upload_file( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1UpdateRecordResponse: - """Upload File - - Uploads a file to the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param file_column_name: Name of the column to store the file in. The column must have a file data type. - :type file_column_name: bytearray - :param _request_timeout: timeout setting for this request. 
If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_upload_file_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - file_column_name=file_column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def file_service_upload_file_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1UpdateRecordResponse]: - """Upload File - - Uploads a file to the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param file_column_name: Name of the column to store the file in. The column must have a file data type. - :type file_column_name: bytearray - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. 
- :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._file_service_upload_file_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - file_column_name=file_column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def file_service_upload_file_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Upload File - - Uploads a file to the specified record. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param file_column_name: Name of the column to store the file in. The column must have a file data type. - :type file_column_name: bytearray - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._file_service_upload_file_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - file_column_name=file_column_name, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _file_service_upload_file_serialize( - self, - vault_id, - object_name, - id, - file_column_name, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - if id is not None: - _path_params['ID'] = id - # process the query parameters - # process the header parameters - # process the form parameters - if file_column_name is not None: - _files['fileColumnName'] = file_column_name - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'multipart/form-data' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}/files', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_batch_operation( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: RecordServiceBatchOperationBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1BatchOperationResponse: - """Batch Operation - - Performs multiple record operations in a single transaction. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: RecordServiceBatchOperationBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_batch_operation_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BatchOperationResponse", - '207': "V1BatchOperationResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_batch_operation_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: RecordServiceBatchOperationBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1BatchOperationResponse]: - """Batch Operation - - Performs multiple record operations in a single transaction. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: RecordServiceBatchOperationBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_batch_operation_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BatchOperationResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_batch_operation_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - body: RecordServiceBatchOperationBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Batch Operation - - Performs multiple record operations in a single transaction. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param body: (required) - :type body: RecordServiceBatchOperationBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_batch_operation_serialize( - vault_id=vault_id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BatchOperationResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_batch_operation_serialize( - self, - vault_id, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_bulk_delete_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceBulkDeleteRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1BulkDeleteRecordResponse: - """Bulk Delete Records - - Deletes the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceBulkDeleteRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_bulk_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkDeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_bulk_delete_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceBulkDeleteRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1BulkDeleteRecordResponse]: - """Bulk Delete Records - - Deletes the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceBulkDeleteRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_bulk_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkDeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_bulk_delete_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceBulkDeleteRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Bulk Delete Records - - Deletes the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceBulkDeleteRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_bulk_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkDeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_bulk_delete_record_serialize( - self, - vault_id, - object_name, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='DELETE', - resource_path='/v1/vaults/{vaultID}/{objectName}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_bulk_get_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], - skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, - offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, - limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1BulkGetRecordResponse: - """Get Record(s) - - Gets the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table that contains the records. (required) - :type object_name: str - :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. - :type skyflow_ids: List[str] - :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param offset: Record position at which to start receiving data. - :type offset: str - :param limit: Number of record to return. Maximum 25. - :type limit: str - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param column_name: Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_name: str - :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_values: List[str] - :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. - :type order_by: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_bulk_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - skyflow_ids=skyflow_ids, - redaction=redaction, - tokenization=tokenization, - fields=fields, - offset=offset, - limit=limit, - download_url=download_url, - column_name=column_name, - column_values=column_values, - order_by=order_by, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkGetRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_bulk_get_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], - skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.
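# --- Editor's sketch: fetching records either by `skyflow_ids` or by a
# unique column and its values (the two selectors are mutually exclusive, as
# the docstring notes). The redaction value "PLAIN_TEXT" is an assumption
# taken from the Skyflow redaction levels; placeholders as before.
records_resp = records_api.record_service_bulk_get_record(
    vault_id="<VAULT_ID>",
    object_name="persons",
    skyflow_ids=["<SKYFLOW_ID_1>", "<SKYFLOW_ID_2>"],
    redaction="PLAIN_TEXT",  # assumed enum value
    tokenization=False,
    fields=["name", "email"],
)
print(records_resp)  # V1BulkGetRecordResponse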

If not specified, returns the first 25 records in the table.")] = None, - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, - offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, - limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1BulkGetRecordResponse]: - """Get Record(s) - - Gets the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table that contains the records. (required) - :type object_name: str - :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. - :type skyflow_ids: List[str] - :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param offset: Record position at which to start receiving data. - :type offset: str - :param limit: Number of record to return. Maximum 25. - :type limit: str - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param column_name: Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_name: str - :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_values: List[str] - :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. - :type order_by: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_bulk_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - skyflow_ids=skyflow_ids, - redaction=redaction, - tokenization=tokenization, - fields=fields, - offset=offset, - limit=limit, - download_url=download_url, - column_name=column_name, - column_values=column_values, - order_by=order_by, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkGetRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_bulk_get_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], - skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, - offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, - limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, - order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get Record(s) - - Gets the specified records from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table that contains the records. (required) - :type object_name: str - :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. - :type skyflow_ids: List[str] - :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param offset: Record position at which to start receiving data. - :type offset: str - :param limit: Number of records to return. Maximum 25. - :type limit: str - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param column_name: Name of the column. It must be configured as unique in the schema. If you provide a column name or column values, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_name: str - :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. - :type column_values: List[str] - :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. - :type order_by: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_bulk_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - skyflow_ids=skyflow_ids, - redaction=redaction, - tokenization=tokenization, - fields=fields, - offset=offset, - limit=limit, - download_url=download_url, - column_name=column_name, - column_values=column_values, - order_by=order_by, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1BulkGetRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_bulk_get_record_serialize( - self, - vault_id, - object_name, - skyflow_ids, - redaction, - tokenization, - fields, - offset, - limit, - download_url, - column_name, - column_values, - order_by, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - 'skyflow_ids': 'multi', - 'fields': 'multi', - 'column_values': 'multi', - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - # process the query parameters - if skyflow_ids is not None: - - _query_params.append(('skyflow_ids', skyflow_ids)) - - if redaction is not None: - - _query_params.append(('redaction', redaction)) - - if tokenization is not None: - - _query_params.append(('tokenization', tokenization)) - - if fields is not None: - - _query_params.append(('fields', fields)) - - if offset is not None: - - _query_params.append(('offset', offset)) - - if limit is not None: - - _query_params.append(('limit', limit)) - - if download_url is not None: - - _query_params.append(('downloadURL', download_url)) - - if column_name is not None: - - 
_query_params.append(('column_name', column_name)) - - if column_values is not None: - - _query_params.append(('column_values', column_values)) - - if order_by is not None: - - _query_params.append(('order_by', order_by)) - - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/v1/vaults/{vaultID}/{objectName}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_delete_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1DeleteRecordResponse: - """Delete Record - - Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record to delete. (required) - :type id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_delete_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1DeleteRecordResponse]: - """Delete Record - - Deletes the specified record from a table.
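# Illustrative usage sketch for the delete method documented above; the vault ID,
# table name, and skyflow_id are placeholders, and RecordsApi is assumed to follow
# the same constructor pattern as the other generated API classes.
from skyflow.generated.rest.api.records_api import RecordsApi

records_api = RecordsApi()
delete_response = records_api.record_service_delete_record(
    vault_id="<VAULT_ID>",   # placeholder vault ID
    object_name="persons",   # placeholder table name
    id="<SKYFLOW_ID>",       # skyflow_id of the record to delete
)
# Per the note above, transient field tokens are not deleted; their values remain
# available until the fields' TTL expires.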

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record to delete. (required) - :type id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_delete_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Delete Record - - Deletes the specified record from a table.
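# Every generated operation also has a *_with_http_info variant, which returns an
# ApiResponse wrapper, and a *_without_preload_content variant, which returns the
# raw REST response without deserializing the body. A sketch using the delete
# operation and the records_api instance from the sketch above; identifiers are
# placeholders and the attribute names reflect the generated response types.
api_response = records_api.record_service_delete_record_with_http_info(
    vault_id="<VAULT_ID>", object_name="persons", id="<SKYFLOW_ID>"
)
print(api_response.status_code)  # HTTP status of the call
print(api_response.data)         # deserialized V1DeleteRecordResponse

raw_response = records_api.record_service_delete_record_without_preload_content(
    vault_id="<VAULT_ID>", object_name="persons", id="<SKYFLOW_ID>"
)
print(raw_response.status)       # raw response; reading/deserialization is left to the caller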

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record to delete. (required) - :type id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_delete_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DeleteRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_delete_record_serialize( - self, - vault_id, - object_name, - id, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - if id is not None: - _path_params['ID'] = id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='DELETE', - resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_get_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: 
Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1FieldRecords: - """Get Record By ID - - Returns the specified record from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - redaction=redaction, - tokenization=tokenization, - fields=fields, - download_url=download_url, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1FieldRecords", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_get_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.
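# Illustrative usage sketch for the "Get Record By ID" method documented above;
# identifiers, field names, and the redaction value are placeholders/assumptions.
from skyflow.generated.rest.api.records_api import RecordsApi

records_api = RecordsApi()
record = records_api.record_service_get_record(
    vault_id="<VAULT_ID>",     # placeholder vault ID
    object_name="persons",     # placeholder table name
    id="<SKYFLOW_ID>",         # skyflow_id of the record
    redaction="MASKED",        # assumed redaction value
    fields=["name", "email"],  # omit to return all fields
    tokenization=False,
)
# Returns a V1FieldRecords instance for the matching record.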

If not specified, returns all fields.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1FieldRecords]: - """Get Record By ID - - Returns the specified record from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - redaction=redaction, - tokenization=tokenization, - fields=fields, - download_url=download_url, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1FieldRecords", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_get_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, - tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, - fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, - download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get Record By ID - - Returns the specified record from a table. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. - :type redaction: str - :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. - :type tokenization: bool - :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. - :type fields: List[str] - :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. - :type download_url: bool - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_get_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - redaction=redaction, - tokenization=tokenization, - fields=fields, - download_url=download_url, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1FieldRecords", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_get_record_serialize( - self, - vault_id, - object_name, - id, - redaction, - tokenization, - fields, - download_url, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - 'fields': 'multi', - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - if id is not None: - _path_params['ID'] = id - # process the query parameters - if redaction is not None: - - _query_params.append(('redaction', redaction)) - - if tokenization is not None: - - _query_params.append(('tokenization', tokenization)) - - if fields is not None: - - _query_params.append(('fields', fields)) - - if download_url is not None: - - _query_params.append(('downloadURL', download_url)) - - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - 
post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_insert_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceInsertRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1InsertRecordResponse: - """Insert Records - - Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceInsertRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_insert_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1InsertRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_insert_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceInsertRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1InsertRecordResponse]: - """Insert Records - - Inserts a record in the specified table.
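# Illustrative usage sketch for the "Insert Records" method documented above. The
# attribute names on RecordServiceInsertRecordBody and V1FieldRecords (`records`,
# `tokenization`, `fields`) are assumptions, as are the identifiers below.
from skyflow.generated.rest.api.records_api import RecordsApi
from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody
from skyflow.generated.rest.models.v1_field_records import V1FieldRecords

records_api = RecordsApi()
insert_body = RecordServiceInsertRecordBody(
    records=[V1FieldRecords(fields={"name": "Jane Doe", "email": "jane@example.com"})],
    tokenization=True,  # return tokens for tokenization-enabled fields
)
insert_response = records_api.record_service_insert_record(
    vault_id="<VAULT_ID>",  # placeholder vault ID
    object_name="persons",  # placeholder table name
    body=insert_body,
)
# Keep unique string columns at or under 2500 characters, per the note above.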

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceInsertRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_insert_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1InsertRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_insert_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - body: RecordServiceInsertRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Insert Records - - Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param body: (required) - :type body: RecordServiceInsertRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_insert_record_serialize( - vault_id=vault_id, - object_name=object_name, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1InsertRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_insert_record_serialize( - self, - vault_id, - object_name, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}/{objectName}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - 
auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_update_record( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - body: RecordServiceUpdateRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1UpdateRecordResponse: - """Update Record - - Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param body: (required) - :type body: RecordServiceUpdateRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_update_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_update_record_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - body: RecordServiceUpdateRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1UpdateRecordResponse]: - """Update Record - - Updates the specified record in a table.
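# Illustrative usage sketch for the "Update Record" method documented above. The
# attribute names on RecordServiceUpdateRecordBody (`record` holding a V1FieldRecords
# with a `fields` map) are assumptions, as are the identifiers below.
from skyflow.generated.rest.api.records_api import RecordsApi
from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody
from skyflow.generated.rest.models.v1_field_records import V1FieldRecords

records_api = RecordsApi()
update_response = records_api.record_service_update_record(
    vault_id="<VAULT_ID>",  # placeholder vault ID
    object_name="persons",  # placeholder table name
    id="<SKYFLOW_ID>",      # skyflow_id of the record
    # Include the full desired contents of each updated field; for JSON fields,
    # omitted nested keys are removed (see the docstring above).
    body=RecordServiceUpdateRecordBody(record=V1FieldRecords(fields={"email": "new@example.com"})),
)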

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param body: (required) - :type body: RecordServiceUpdateRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_update_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_update_record_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - object_name: Annotated[StrictStr, Field(description="Name of the table.")], - id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], - body: RecordServiceUpdateRecordBody, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Update Record - - Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param object_name: Name of the table. (required) - :type object_name: str - :param id: `skyflow_id` of the record. (required) - :type id: str - :param body: (required) - :type body: RecordServiceUpdateRecordBody - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_update_record_serialize( - vault_id=vault_id, - object_name=object_name, - id=id, - body=body, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1UpdateRecordResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_update_record_serialize( - self, - vault_id, - object_name, - id, - body, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - if object_name is not None: - _path_params['objectName'] = object_name - if id is not None: - _path_params['ID'] = id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if body is not None: - _body_params = body - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='PUT', - resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - 
post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/skyflow/generated/rest/api/tokens_api.py b/skyflow/generated/rest/api/tokens_api.py deleted file mode 100644 index e21e7935..00000000 --- a/skyflow/generated/rest/api/tokens_api.py +++ /dev/null @@ -1,623 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header: Authorization
Value: A Bearer Token. See API Authentication.
Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
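# Illustrative sketch of pointing the generated client at a vault and supplying the
# bearer token described above. It assumes the standard OpenAPI-generated
# Configuration/ApiClient interface (a `host` argument, an `access_token` attribute,
# and the set_default/get_default pair); the identifier and token are placeholders.
from skyflow.generated.rest.configuration import Configuration
from skyflow.generated.rest.api_client import ApiClient

config = Configuration(host="https://<identifier>.vault.skyflowapis.com")
config.access_token = "<BEARER_TOKEN>"    # sent as "Authorization: Bearer <BEARER_TOKEN>"
ApiClient.set_default(ApiClient(config))  # API classes created without a client pick this up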
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field, StrictStr -from typing_extensions import Annotated -from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload -from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse -from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload -from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse - -from skyflow.generated.rest.api_client import ApiClient, RequestSerialized -from skyflow.generated.rest.api_response import ApiResponse -from skyflow.generated.rest.rest import RESTResponseType - - -class TokensApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def record_service_detokenize( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - detokenize_payload: V1DetokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1DetokenizeResponse: - """Detokenize - - Returns records that correspond to the specified tokens. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param detokenize_payload: (required) - :type detokenize_payload: V1DetokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_detokenize_serialize( - vault_id=vault_id, - detokenize_payload=detokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DetokenizeResponse", - '207': "V1DetokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_detokenize_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - detokenize_payload: V1DetokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1DetokenizeResponse]: - """Detokenize - - Returns records that correspond to the specified tokens. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param detokenize_payload: (required) - :type detokenize_payload: V1DetokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_detokenize_serialize( - vault_id=vault_id, - detokenize_payload=detokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DetokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_detokenize_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - detokenize_payload: V1DetokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Detokenize - - Returns records that correspond to the specified tokens. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param detokenize_payload: (required) - :type detokenize_payload: V1DetokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._record_service_detokenize_serialize( - vault_id=vault_id, - detokenize_payload=detokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1DetokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_detokenize_serialize( - self, - vault_id, - detokenize_payload, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if detokenize_payload is not None: - _body_params = detokenize_payload - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}/detokenize', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def record_service_tokenize( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - tokenize_payload: V1TokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> V1TokenizeResponse: - """Tokenize - - Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param tokenize_payload: (required) - :type tokenize_payload: V1TokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_tokenize_serialize( - vault_id=vault_id, - tokenize_payload=tokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1TokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def record_service_tokenize_with_http_info( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - tokenize_payload: V1TokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[V1TokenizeResponse]: - """Tokenize - - Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param tokenize_payload: (required) - :type tokenize_payload: V1TokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_tokenize_serialize( - vault_id=vault_id, - tokenize_payload=tokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1TokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def record_service_tokenize_without_preload_content( - self, - vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], - tokenize_payload: V1TokenizePayload, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Tokenize - - Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. - - :param vault_id: ID of the vault. (required) - :type vault_id: str - :param tokenize_payload: (required) - :type tokenize_payload: V1TokenizePayload - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._record_service_tokenize_serialize( - vault_id=vault_id, - tokenize_payload=tokenize_payload, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "V1TokenizeResponse", - '404': "object", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _record_service_tokenize_serialize( - self, - vault_id, - tokenize_payload, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[str, Union[str, bytes]] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if vault_id is not None: - _path_params['vaultID'] = vault_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if tokenize_payload is not None: - _body_params = tokenize_payload - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'Bearer' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/v1/vaults/{vaultID}/tokenize', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git 
a/skyflow/generated/rest/api_client.py b/skyflow/generated/rest/api_client.py deleted file mode 100644 index 8aa5e6a9..00000000 --- a/skyflow/generated/rest/api_client.py +++ /dev/null @@ -1,789 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
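As a sketch only, and assuming the generated Configuration exposes the standard openapi-generator access_token setting for the Bearer scheme (host and token below are placeholders):

    from skyflow.generated.rest.configuration import Configuration
    from skyflow.generated.rest.api_client import ApiClient

    config = Configuration(host="https://<identifier>.vault.skyflowapis.com")
    config.access_token = "<bearer_token>"  # placeholder; supplies the Authorization header above
    api_client = ApiClient(configuration=config)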
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -import datetime -from dateutil.parser import parse -from enum import Enum -import decimal -import json -import mimetypes -import os -import re -import tempfile - -from urllib.parse import quote -from typing import Tuple, Optional, List, Dict, Union -from pydantic import SecretStr - -from skyflow.generated.rest.configuration import Configuration -from skyflow.generated.rest.api_response import ApiResponse, T as ApiResponseT -import skyflow.generated.rest.models -from skyflow.generated.rest import rest -from skyflow.generated.rest.exceptions import ( - ApiValueError, - ApiException, - BadRequestException, - UnauthorizedException, - ForbiddenException, - NotFoundException, - ServiceException -) - -RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] - -class ApiClient: - """Generic API client for OpenAPI client library builds. - - OpenAPI generic API client. This client handles the client- - server communication, and is invariant across implementations. Specifics of - the methods and models for each application are generated from the OpenAPI - templates. - - :param configuration: .Configuration object for this client - :param header_name: a header to pass when making calls to the API. - :param header_value: a header value to pass when making calls to - the API. - :param cookie: a cookie to include in the header when making calls - to the API - """ - - PRIMITIVE_TYPES = (float, bool, bytes, str, int) - NATIVE_TYPES_MAPPING = { - 'int': int, - 'long': int, # TODO remove as only py3 is supported? - 'float': float, - 'str': str, - 'bool': bool, - 'date': datetime.date, - 'datetime': datetime.datetime, - 'decimal': decimal.Decimal, - 'object': object, - } - _pool = None - - def __init__( - self, - configuration=None, - header_name=None, - header_value=None, - cookie=None - ) -> None: - # use default configuration if none is provided - if configuration is None: - configuration = Configuration.get_default() - self.configuration = configuration - - self.rest_client = rest.RESTClientObject(configuration) - self.default_headers = {} - if header_name is not None: - self.default_headers[header_name] = header_value - self.cookie = cookie - # Set default User-Agent. - self.user_agent = 'OpenAPI-Generator/1.0.0/python' - self.client_side_validation = configuration.client_side_validation - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - pass - - @property - def user_agent(self): - """User agent for this API client""" - return self.default_headers['User-Agent'] - - @user_agent.setter - def user_agent(self, value): - self.default_headers['User-Agent'] = value - - def set_default_header(self, header_name, header_value): - self.default_headers[header_name] = header_value - - - _default = None - - @classmethod - def get_default(cls): - """Return new instance of ApiClient. - - This method returns newly created, based on default constructor, - object of ApiClient class or returns a copy of default - ApiClient. - - :return: The ApiClient object. - """ - if cls._default is None: - cls._default = ApiClient() - return cls._default - - @classmethod - def set_default(cls, default): - """Set default instance of ApiClient. - - It stores default ApiClient. - - :param default: object of ApiClient. 
- """ - cls._default = default - - def param_serialize( - self, - method, - resource_path, - path_params=None, - query_params=None, - header_params=None, - body=None, - post_params=None, - files=None, auth_settings=None, - collection_formats=None, - _host=None, - _request_auth=None - ) -> RequestSerialized: - - """Builds the HTTP request params needed by the request. - :param method: Method to call. - :param resource_path: Path to method endpoint. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param auth_settings list: Auth Settings names for the request. - :param files dict: key -> filename, value -> filepath, - for `multipart/form-data`. - :param collection_formats: dict of collection formats for path, query, - header, and post parameters. - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the authentication - in the spec for a single request. - :return: tuple of form (path, http_method, query_params, header_params, - body, post_params, files) - """ - - config = self.configuration - - # header parameters - header_params = header_params or {} - header_params.update(self.default_headers) - if self.cookie: - header_params['Cookie'] = self.cookie - if header_params: - header_params = self.sanitize_for_serialization(header_params) - header_params = dict( - self.parameters_to_tuples(header_params,collection_formats) - ) - - # path parameters - if path_params: - path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples( - path_params, - collection_formats - ) - for k, v in path_params: - # specified safe chars, encode everything - resource_path = resource_path.replace( - '{%s}' % k, - quote(str(v), safe=config.safe_chars_for_path_param) - ) - - # post parameters - if post_params or files: - post_params = post_params if post_params else [] - post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples( - post_params, - collection_formats - ) - if files: - post_params.extend(self.files_parameters(files)) - - # auth setting - self.update_params_for_auth( - header_params, - query_params, - auth_settings, - resource_path, - method, - body, - request_auth=_request_auth - ) - - # body - if body: - body = self.sanitize_for_serialization(body) - - # request url - if _host is None or self.configuration.ignore_operation_servers: - url = self.configuration.host + resource_path - else: - # use server/host defined in path or operation instead - url = _host + resource_path - - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - url_query = self.parameters_to_url_query( - query_params, - collection_formats - ) - url += "?" + url_query - - return method, url, header_params, body, post_params - - - def call_api( - self, - method, - url, - header_params=None, - body=None, - post_params=None, - _request_timeout=None - ) -> rest.RESTResponse: - """Makes the HTTP request (synchronous) - :param method: Method to call. - :param url: Path to method endpoint. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. 
- :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param _request_timeout: timeout setting for this request. - :return: RESTResponse - """ - - try: - # perform request and return response - response_data = self.rest_client.request( - method, url, - headers=header_params, - body=body, post_params=post_params, - _request_timeout=_request_timeout - ) - - except ApiException as e: - raise e - - return response_data - - def response_deserialize( - self, - response_data: rest.RESTResponse, - response_types_map: Optional[Dict[str, ApiResponseT]]=None - ) -> ApiResponse[ApiResponseT]: - """Deserializes response into an object. - :param response_data: RESTResponse object to be deserialized. - :param response_types_map: dict of response types. - :return: ApiResponse - """ - - msg = "RESTResponse.read() must be called before passing it to response_deserialize()" - assert response_data.data is not None, msg - - response_type = response_types_map.get(str(response_data.status), None) - if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: - # if not found, look for '1XX', '2XX', etc. - response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) - - # deserialize response data - response_text = None - return_data = None - try: - if response_type == "bytearray": - return_data = response_data.data - elif response_type == "file": - return_data = self.__deserialize_file(response_data) - elif response_type is not None: - match = None - content_type = response_data.getheader('content-type') - if content_type is not None: - match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) - encoding = match.group(1) if match else "utf-8" - response_text = response_data.data.decode(encoding) - return_data = self.deserialize(response_text, response_type, content_type) - finally: - if not 200 <= response_data.status <= 299: - raise ApiException.from_response( - http_resp=response_data, - body=response_text, - data=return_data, - ) - - return ApiResponse( - status_code = response_data.status, - data = return_data, - headers = response_data.getheaders(), - raw_data = response_data.data - ) - - def sanitize_for_serialization(self, obj): - """Builds a JSON POST object. - - If obj is None, return None. - If obj is SecretStr, return obj.get_secret_value() - If obj is str, int, long, float, bool, return directly. - If obj is datetime.datetime, datetime.date - convert to string in iso8601 format. - If obj is decimal.Decimal return string representation. - If obj is list, sanitize each element in the list. - If obj is dict, return the dict. - If obj is OpenAPI model, return the properties dict. - - :param obj: The data to serialize. - :return: The serialized form of data. 
- """ - if obj is None: - return None - elif isinstance(obj, Enum): - return obj.value - elif isinstance(obj, SecretStr): - return obj.get_secret_value() - elif isinstance(obj, self.PRIMITIVE_TYPES): - return obj - elif isinstance(obj, list): - return [ - self.sanitize_for_serialization(sub_obj) for sub_obj in obj - ] - elif isinstance(obj, tuple): - return tuple( - self.sanitize_for_serialization(sub_obj) for sub_obj in obj - ) - elif isinstance(obj, (datetime.datetime, datetime.date)): - return obj.isoformat() - elif isinstance(obj, decimal.Decimal): - return str(obj) - - elif isinstance(obj, dict): - obj_dict = obj - else: - # Convert model obj to dict except - # attributes `openapi_types`, `attribute_map` - # and attributes which value is not None. - # Convert attribute name to json key in - # model definition for request. - if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')): - obj_dict = obj.to_dict() - else: - obj_dict = obj.__dict__ - - return { - key: self.sanitize_for_serialization(val) - for key, val in obj_dict.items() - } - - def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): - """Deserializes response into an object. - - :param response: RESTResponse object to be deserialized. - :param response_type: class literal for - deserialized object, or string of class name. - :param content_type: content type of response. - - :return: deserialized object. - """ - - # fetch data from response object - if content_type is None: - try: - data = json.loads(response_text) - except ValueError: - data = response_text - elif content_type.startswith("application/json"): - if response_text == "": - data = "" - else: - data = json.loads(response_text) - elif content_type.startswith("text/plain"): - data = response_text - else: - raise ApiException( - status=0, - reason="Unsupported content type: {0}".format(content_type) - ) - - return self.__deserialize(data, response_type) - - def __deserialize(self, data, klass): - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. - """ - if data is None: - return None - - if isinstance(klass, str): - if klass.startswith('List['): - m = re.match(r'List\[(.*)]', klass) - assert m is not None, "Malformed List type definition" - sub_kls = m.group(1) - return [self.__deserialize(sub_data, sub_kls) - for sub_data in data] - - if klass.startswith('Dict['): - m = re.match(r'Dict\[([^,]*), (.*)]', klass) - assert m is not None, "Malformed Dict type definition" - sub_kls = m.group(2) - return {k: self.__deserialize(v, sub_kls) - for k, v in data.items()} - - # convert str to class - if klass in self.NATIVE_TYPES_MAPPING: - klass = self.NATIVE_TYPES_MAPPING[klass] - else: - klass = getattr(skyflow.generated.rest.models, klass) - - if klass in self.PRIMITIVE_TYPES: - return self.__deserialize_primitive(data, klass) - elif klass == object: - return self.__deserialize_object(data) - elif klass == datetime.date: - return self.__deserialize_date(data) - elif klass == datetime.datetime: - return self.__deserialize_datetime(data) - elif klass == decimal.Decimal: - return decimal.Decimal(data) - elif issubclass(klass, Enum): - return self.__deserialize_enum(data, klass) - else: - return self.__deserialize_model(data, klass) - - def parameters_to_tuples(self, params, collection_formats): - """Get parameters as list of tuples, formatting collections. 
- - :param params: Parameters as dict or list of two-tuples - :param dict collection_formats: Parameter collection formats - :return: Parameters as list of tuples, collections formatted - """ - new_params: List[Tuple[str, str]] = [] - if collection_formats is None: - collection_formats = {} - for k, v in params.items() if isinstance(params, dict) else params: - if k in collection_formats: - collection_format = collection_formats[k] - if collection_format == 'multi': - new_params.extend((k, value) for value in v) - else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' - else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(str(value) for value in v))) - else: - new_params.append((k, v)) - return new_params - - def parameters_to_url_query(self, params, collection_formats): - """Get parameters as list of tuples, formatting collections. - - :param params: Parameters as dict or list of two-tuples - :param dict collection_formats: Parameter collection formats - :return: URL query string (e.g. a=Hello%20World&b=123) - """ - new_params: List[Tuple[str, str]] = [] - if collection_formats is None: - collection_formats = {} - for k, v in params.items() if isinstance(params, dict) else params: - if isinstance(v, bool): - v = str(v).lower() - if isinstance(v, (int, float)): - v = str(v) - if isinstance(v, dict): - v = json.dumps(v) - - if k in collection_formats: - collection_format = collection_formats[k] - if collection_format == 'multi': - new_params.extend((k, str(value)) for value in v) - else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' - else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(quote(str(value)) for value in v)) - ) - else: - new_params.append((k, quote(str(v)))) - - return "&".join(["=".join(map(str, item)) for item in new_params]) - - def files_parameters(self, files: Dict[str, Union[str, bytes]]): - """Builds form parameters. - - :param files: File parameters. - :return: Form parameters with files. - """ - params = [] - for k, v in files.items(): - if isinstance(v, str): - with open(v, 'rb') as f: - filename = os.path.basename(f.name) - filedata = f.read() - elif isinstance(v, bytes): - filename = k - filedata = v - else: - raise ValueError("Unsupported file value") - mimetype = ( - mimetypes.guess_type(filename)[0] - or 'application/octet-stream' - ) - params.append( - tuple([k, tuple([filename, filedata, mimetype])]) - ) - return params - - def select_header_accept(self, accepts: List[str]) -> Optional[str]: - """Returns `Accept` based on an array of accepts provided. - - :param accepts: List of headers. - :return: Accept (e.g. application/json). - """ - if not accepts: - return None - - for accept in accepts: - if re.search('json', accept, re.IGNORECASE): - return accept - - return accepts[0] - - def select_header_content_type(self, content_types): - """Returns `Content-Type` based on an array of content_types provided. - - :param content_types: List of content-types. - :return: Content-Type (e.g. application/json). 
- """ - if not content_types: - return None - - for content_type in content_types: - if re.search('json', content_type, re.IGNORECASE): - return content_type - - return content_types[0] - - def update_params_for_auth( - self, - headers, - queries, - auth_settings, - resource_path, - method, - body, - request_auth=None - ) -> None: - """Updates header and query params based on authentication setting. - - :param headers: Header parameters dict to be updated. - :param queries: Query parameters tuple list to be updated. - :param auth_settings: Authentication setting identifiers list. - :resource_path: A string representation of the HTTP request resource path. - :method: A string representation of the HTTP request method. - :body: A object representing the body of the HTTP request. - The object type is the return value of sanitize_for_serialization(). - :param request_auth: if set, the provided settings will - override the token in the configuration. - """ - if not auth_settings: - return - - if request_auth: - self._apply_auth_params( - headers, - queries, - resource_path, - method, - body, - request_auth - ) - else: - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - self._apply_auth_params( - headers, - queries, - resource_path, - method, - body, - auth_setting - ) - - def _apply_auth_params( - self, - headers, - queries, - resource_path, - method, - body, - auth_setting - ) -> None: - """Updates the request parameters based on a single auth_setting - - :param headers: Header parameters dict to be updated. - :param queries: Query parameters tuple list to be updated. - :resource_path: A string representation of the HTTP request resource path. - :method: A string representation of the HTTP request method. - :body: A object representing the body of the HTTP request. - The object type is the return value of sanitize_for_serialization(). - :param auth_setting: auth settings for the endpoint - """ - if auth_setting['in'] == 'cookie': - headers['Cookie'] = auth_setting['value'] - elif auth_setting['in'] == 'header': - if auth_setting['type'] != 'http-signature': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - queries.append((auth_setting['key'], auth_setting['value'])) - else: - raise ApiValueError( - 'Authentication token must be in `query` or `header`' - ) - - def __deserialize_file(self, response): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. - - handle file downloading - save response body into a tmp file and return the instance - - :param response: RESTResponse. - :return: file path. - """ - fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - content_disposition = response.getheader("Content-Disposition") - if content_disposition: - m = re.search( - r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition - ) - assert m is not None, "Unexpected 'content-disposition' header value" - filename = m.group(1) - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - f.write(response.data) - - return path - - def __deserialize_primitive(self, data, klass): - """Deserializes string to primitive type. - - :param data: str. - :param klass: class literal. - - :return: int, long, float, str, bool. 
- """ - try: - return klass(data) - except UnicodeEncodeError: - return str(data) - except TypeError: - return data - - def __deserialize_object(self, value): - """Return an original value. - - :return: object. - """ - return value - - def __deserialize_date(self, string): - """Deserializes string to date. - - :param string: str. - :return: date. - """ - try: - return parse(string).date() - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason="Failed to parse `{0}` as date object".format(string) - ) - - def __deserialize_datetime(self, string): - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :return: datetime. - """ - try: - return parse(string) - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason=( - "Failed to parse `{0}` as datetime object" - .format(string) - ) - ) - - def __deserialize_enum(self, data, klass): - """Deserializes primitive type to enum. - - :param data: primitive type. - :param klass: class literal. - :return: enum value. - """ - try: - return klass(data) - except ValueError: - raise rest.ApiException( - status=0, - reason=( - "Failed to parse `{0}` as `{1}`" - .format(data, klass) - ) - ) - - def __deserialize_model(self, data, klass): - """Deserializes list or dict to model. - - :param data: dict, list. - :param klass: class literal. - :return: model object. - """ - - return klass.from_dict(data) diff --git a/skyflow/generated/rest/api_response.py b/skyflow/generated/rest/api_response.py deleted file mode 100644 index 9bc7c11f..00000000 --- a/skyflow/generated/rest/api_response.py +++ /dev/null @@ -1,21 +0,0 @@ -"""API response object.""" - -from __future__ import annotations -from typing import Optional, Generic, Mapping, TypeVar -from pydantic import Field, StrictInt, StrictBytes, BaseModel - -T = TypeVar("T") - -class ApiResponse(BaseModel, Generic[T]): - """ - API response object - """ - - status_code: StrictInt = Field(description="HTTP status code") - headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") - data: T = Field(description="Deserialized data given the data type") - raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") - - model_config = { - "arbitrary_types_allowed": True - } diff --git a/skyflow/generated/rest/audit/__init__.py b/skyflow/generated/rest/audit/__init__.py new file mode 100644 index 00000000..38fe28d3 --- /dev/null +++ b/skyflow/generated/rest/audit/__init__.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from .types import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, + AuditServiceListAuditEventsRequestFilterOpsContextAccessType, + AuditServiceListAuditEventsRequestFilterOpsContextActorType, + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, + AuditServiceListAuditEventsRequestFilterOpsResourceType, + AuditServiceListAuditEventsRequestSortOpsOrderBy, +) + +__all__ = [ + "AuditServiceListAuditEventsRequestFilterOpsActionType", + "AuditServiceListAuditEventsRequestFilterOpsContextAccessType", + "AuditServiceListAuditEventsRequestFilterOpsContextActorType", + "AuditServiceListAuditEventsRequestFilterOpsContextAuthMode", + "AuditServiceListAuditEventsRequestFilterOpsResourceType", + "AuditServiceListAuditEventsRequestSortOpsOrderBy", +] diff --git a/skyflow/generated/rest/audit/client.py b/skyflow/generated/rest/audit/client.py new file mode 100644 index 00000000..3b4d329a --- /dev/null +++ b/skyflow/generated/rest/audit/client.py @@ -0,0 +1,509 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.client_wrapper import SyncClientWrapper +import typing +from .types.audit_service_list_audit_events_request_filter_ops_context_actor_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextActorType, +) +from .types.audit_service_list_audit_events_request_filter_ops_context_access_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextAccessType, +) +from .types.audit_service_list_audit_events_request_filter_ops_context_auth_mode import ( + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, +) +from .types.audit_service_list_audit_events_request_filter_ops_action_type import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, +) +from .types.audit_service_list_audit_events_request_filter_ops_resource_type import ( + AuditServiceListAuditEventsRequestFilterOpsResourceType, +) +from .types.audit_service_list_audit_events_request_sort_ops_order_by import ( + AuditServiceListAuditEventsRequestSortOpsOrderBy, +) +from ..core.request_options import RequestOptions +from ..types.v_1_audit_response import V1AuditResponse +from ..core.pydantic_utilities import parse_obj_as +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper + + +class AuditClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def audit_service_list_audit_events( + self, + *, + filter_ops_account_id: str, + filter_ops_context_change_id: typing.Optional[str] = None, + filter_ops_context_request_id: typing.Optional[str] = None, + filter_ops_context_trace_id: typing.Optional[str] = None, + filter_ops_context_session_id: typing.Optional[str] = None, + filter_ops_context_actor: typing.Optional[str] = None, + filter_ops_context_actor_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextActorType + ] = None, + filter_ops_context_access_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAccessType + ] = None, + filter_ops_context_ip_address: typing.Optional[str] = None, + filter_ops_context_origin: typing.Optional[str] = None, + filter_ops_context_auth_mode: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode + ] = None, + filter_ops_context_jwt_id: typing.Optional[str] = None, + filter_ops_context_bearer_token_context_id: typing.Optional[str] = None, + filter_ops_parent_account_id: 
typing.Optional[str] = None, + filter_ops_workspace_id: typing.Optional[str] = None, + filter_ops_vault_id: typing.Optional[str] = None, + filter_ops_resource_i_ds: typing.Optional[str] = None, + filter_ops_action_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] = None, + filter_ops_resource_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] = None, + filter_ops_tags: typing.Optional[str] = None, + filter_ops_response_code: typing.Optional[int] = None, + filter_ops_start_time: typing.Optional[str] = None, + filter_ops_end_time: typing.Optional[str] = None, + filter_ops_api_name: typing.Optional[str] = None, + filter_ops_response_message: typing.Optional[str] = None, + filter_ops_http_method: typing.Optional[str] = None, + filter_ops_http_uri: typing.Optional[str] = None, + sort_ops_sort_by: typing.Optional[str] = None, + sort_ops_order_by: typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] = None, + after_ops_timestamp: typing.Optional[str] = None, + after_ops_change_id: typing.Optional[str] = None, + limit: typing.Optional[int] = None, + offset: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1AuditResponse: + """ + Lists audit events that match query parameters. + + Parameters + ---------- + filter_ops_account_id : str + Resources with the specified account ID. + + filter_ops_context_change_id : typing.Optional[str] + ID for the audit event. + + filter_ops_context_request_id : typing.Optional[str] + ID for the request that caused the event. + + filter_ops_context_trace_id : typing.Optional[str] + ID for the request set by the service that received the request. + + filter_ops_context_session_id : typing.Optional[str] + ID for the session in which the request was sent. + + filter_ops_context_actor : typing.Optional[str] + Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + + filter_ops_context_actor_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextActorType] + Type of member who sent the request. + + filter_ops_context_access_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAccessType] + Type of access for the request. + + filter_ops_context_ip_address : typing.Optional[str] + IP Address of the client that made the request. + + filter_ops_context_origin : typing.Optional[str] + HTTP Origin request header (including scheme, hostname, and port) of the request. + + filter_ops_context_auth_mode : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAuthMode] + Authentication mode the `actor` used. + + filter_ops_context_jwt_id : typing.Optional[str] + ID of the JWT token. + + filter_ops_context_bearer_token_context_id : typing.Optional[str] + Embedded User Context. + + filter_ops_parent_account_id : typing.Optional[str] + Resources with the specified parent account ID. + + filter_ops_workspace_id : typing.Optional[str] + Resources with the specified workspace ID. + + filter_ops_vault_id : typing.Optional[str] + Resources with the specified vault ID. + + filter_ops_resource_i_ds : typing.Optional[str] + Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of "\/\". For example, "VAULT/12345, USER/67890". + + filter_ops_action_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] + Events with the specified action type. 
+ + filter_ops_resource_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] + Resources with the specified type. + + filter_ops_tags : typing.Optional[str] + Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, "login, get". + + filter_ops_response_code : typing.Optional[int] + HTTP response code of the request. + + filter_ops_start_time : typing.Optional[str] + Start timestamp for the query, in SQL format. + + filter_ops_end_time : typing.Optional[str] + End timestamp for the query, in SQL format. + + filter_ops_api_name : typing.Optional[str] + Name of the API called in the request. + + filter_ops_response_message : typing.Optional[str] + Response message of the request. + + filter_ops_http_method : typing.Optional[str] + HTTP method of the request. + + filter_ops_http_uri : typing.Optional[str] + HTTP URI of the request. + + sort_ops_sort_by : typing.Optional[str] + Fully-qualified field by which to sort results. Field names should be in camel case (for example, "capitalization.camelCase"). + + sort_ops_order_by : typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] + Ascending or descending ordering of results. + + after_ops_timestamp : typing.Optional[str] + Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + after_ops_change_id : typing.Optional[str] + Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + limit : typing.Optional[int] + Number of results to return. + + offset : typing.Optional[int] + Record position at which to start returning results. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1AuditResponse + A successful response. 
+ + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.audit.audit_service_list_audit_events( + filter_ops_account_id="filterOps.accountID", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "v1/audit/events", + method="GET", + params={ + "filterOps.context.changeID": filter_ops_context_change_id, + "filterOps.context.requestID": filter_ops_context_request_id, + "filterOps.context.traceID": filter_ops_context_trace_id, + "filterOps.context.sessionID": filter_ops_context_session_id, + "filterOps.context.actor": filter_ops_context_actor, + "filterOps.context.actorType": filter_ops_context_actor_type, + "filterOps.context.accessType": filter_ops_context_access_type, + "filterOps.context.ipAddress": filter_ops_context_ip_address, + "filterOps.context.origin": filter_ops_context_origin, + "filterOps.context.authMode": filter_ops_context_auth_mode, + "filterOps.context.jwtID": filter_ops_context_jwt_id, + "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, + "filterOps.parentAccountID": filter_ops_parent_account_id, + "filterOps.accountID": filter_ops_account_id, + "filterOps.workspaceID": filter_ops_workspace_id, + "filterOps.vaultID": filter_ops_vault_id, + "filterOps.resourceIDs": filter_ops_resource_i_ds, + "filterOps.actionType": filter_ops_action_type, + "filterOps.resourceType": filter_ops_resource_type, + "filterOps.tags": filter_ops_tags, + "filterOps.responseCode": filter_ops_response_code, + "filterOps.startTime": filter_ops_start_time, + "filterOps.endTime": filter_ops_end_time, + "filterOps.apiName": filter_ops_api_name, + "filterOps.responseMessage": filter_ops_response_message, + "filterOps.httpMethod": filter_ops_http_method, + "filterOps.httpURI": filter_ops_http_uri, + "sortOps.sortBy": sort_ops_sort_by, + "sortOps.orderBy": sort_ops_order_by, + "afterOps.timestamp": after_ops_timestamp, + "afterOps.changeID": after_ops_change_id, + "limit": limit, + "offset": offset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1AuditResponse, + parse_obj_as( + type_=V1AuditResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncAuditClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def audit_service_list_audit_events( + self, + *, + filter_ops_account_id: str, + filter_ops_context_change_id: typing.Optional[str] = None, + filter_ops_context_request_id: typing.Optional[str] = None, + filter_ops_context_trace_id: typing.Optional[str] = None, + filter_ops_context_session_id: typing.Optional[str] = None, + filter_ops_context_actor: typing.Optional[str] = None, + filter_ops_context_actor_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextActorType + ] = None, + filter_ops_context_access_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAccessType + ] = None, + filter_ops_context_ip_address: typing.Optional[str] = None, + 
filter_ops_context_origin: typing.Optional[str] = None, + filter_ops_context_auth_mode: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode + ] = None, + filter_ops_context_jwt_id: typing.Optional[str] = None, + filter_ops_context_bearer_token_context_id: typing.Optional[str] = None, + filter_ops_parent_account_id: typing.Optional[str] = None, + filter_ops_workspace_id: typing.Optional[str] = None, + filter_ops_vault_id: typing.Optional[str] = None, + filter_ops_resource_i_ds: typing.Optional[str] = None, + filter_ops_action_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] = None, + filter_ops_resource_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] = None, + filter_ops_tags: typing.Optional[str] = None, + filter_ops_response_code: typing.Optional[int] = None, + filter_ops_start_time: typing.Optional[str] = None, + filter_ops_end_time: typing.Optional[str] = None, + filter_ops_api_name: typing.Optional[str] = None, + filter_ops_response_message: typing.Optional[str] = None, + filter_ops_http_method: typing.Optional[str] = None, + filter_ops_http_uri: typing.Optional[str] = None, + sort_ops_sort_by: typing.Optional[str] = None, + sort_ops_order_by: typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] = None, + after_ops_timestamp: typing.Optional[str] = None, + after_ops_change_id: typing.Optional[str] = None, + limit: typing.Optional[int] = None, + offset: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1AuditResponse: + """ + Lists audit events that match query parameters. + + Parameters + ---------- + filter_ops_account_id : str + Resources with the specified account ID. + + filter_ops_context_change_id : typing.Optional[str] + ID for the audit event. + + filter_ops_context_request_id : typing.Optional[str] + ID for the request that caused the event. + + filter_ops_context_trace_id : typing.Optional[str] + ID for the request set by the service that received the request. + + filter_ops_context_session_id : typing.Optional[str] + ID for the session in which the request was sent. + + filter_ops_context_actor : typing.Optional[str] + Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + + filter_ops_context_actor_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextActorType] + Type of member who sent the request. + + filter_ops_context_access_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAccessType] + Type of access for the request. + + filter_ops_context_ip_address : typing.Optional[str] + IP Address of the client that made the request. + + filter_ops_context_origin : typing.Optional[str] + HTTP Origin request header (including scheme, hostname, and port) of the request. + + filter_ops_context_auth_mode : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAuthMode] + Authentication mode the `actor` used. + + filter_ops_context_jwt_id : typing.Optional[str] + ID of the JWT token. + + filter_ops_context_bearer_token_context_id : typing.Optional[str] + Embedded User Context. + + filter_ops_parent_account_id : typing.Optional[str] + Resources with the specified parent account ID. + + filter_ops_workspace_id : typing.Optional[str] + Resources with the specified workspace ID. + + filter_ops_vault_id : typing.Optional[str] + Resources with the specified vault ID. 
+ + filter_ops_resource_i_ds : typing.Optional[str] + Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of "\/\". For example, "VAULT/12345, USER/67890". + + filter_ops_action_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] + Events with the specified action type. + + filter_ops_resource_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] + Resources with the specified type. + + filter_ops_tags : typing.Optional[str] + Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, "login, get". + + filter_ops_response_code : typing.Optional[int] + HTTP response code of the request. + + filter_ops_start_time : typing.Optional[str] + Start timestamp for the query, in SQL format. + + filter_ops_end_time : typing.Optional[str] + End timestamp for the query, in SQL format. + + filter_ops_api_name : typing.Optional[str] + Name of the API called in the request. + + filter_ops_response_message : typing.Optional[str] + Response message of the request. + + filter_ops_http_method : typing.Optional[str] + HTTP method of the request. + + filter_ops_http_uri : typing.Optional[str] + HTTP URI of the request. + + sort_ops_sort_by : typing.Optional[str] + Fully-qualified field by which to sort results. Field names should be in camel case (for example, "capitalization.camelCase"). + + sort_ops_order_by : typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] + Ascending or descending ordering of results. + + after_ops_timestamp : typing.Optional[str] + Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + after_ops_change_id : typing.Optional[str] + Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + limit : typing.Optional[int] + Number of results to return. + + offset : typing.Optional[int] + Record position at which to start returning results. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1AuditResponse + A successful response. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.audit.audit_service_list_audit_events( + filter_ops_account_id="filterOps.accountID", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/audit/events", + method="GET", + params={ + "filterOps.context.changeID": filter_ops_context_change_id, + "filterOps.context.requestID": filter_ops_context_request_id, + "filterOps.context.traceID": filter_ops_context_trace_id, + "filterOps.context.sessionID": filter_ops_context_session_id, + "filterOps.context.actor": filter_ops_context_actor, + "filterOps.context.actorType": filter_ops_context_actor_type, + "filterOps.context.accessType": filter_ops_context_access_type, + "filterOps.context.ipAddress": filter_ops_context_ip_address, + "filterOps.context.origin": filter_ops_context_origin, + "filterOps.context.authMode": filter_ops_context_auth_mode, + "filterOps.context.jwtID": filter_ops_context_jwt_id, + "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, + "filterOps.parentAccountID": filter_ops_parent_account_id, + "filterOps.accountID": filter_ops_account_id, + "filterOps.workspaceID": filter_ops_workspace_id, + "filterOps.vaultID": filter_ops_vault_id, + "filterOps.resourceIDs": filter_ops_resource_i_ds, + "filterOps.actionType": filter_ops_action_type, + "filterOps.resourceType": filter_ops_resource_type, + "filterOps.tags": filter_ops_tags, + "filterOps.responseCode": filter_ops_response_code, + "filterOps.startTime": filter_ops_start_time, + "filterOps.endTime": filter_ops_end_time, + "filterOps.apiName": filter_ops_api_name, + "filterOps.responseMessage": filter_ops_response_message, + "filterOps.httpMethod": filter_ops_http_method, + "filterOps.httpURI": filter_ops_http_uri, + "sortOps.sortBy": sort_ops_sort_by, + "sortOps.orderBy": sort_ops_order_by, + "afterOps.timestamp": after_ops_timestamp, + "afterOps.changeID": after_ops_change_id, + "limit": limit, + "offset": offset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1AuditResponse, + parse_obj_as( + type_=V1AuditResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/audit/types/__init__.py b/skyflow/generated/rest/audit/types/__init__.py new file mode 100644 index 00000000..39f38866 --- /dev/null +++ b/skyflow/generated/rest/audit/types/__init__.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from .audit_service_list_audit_events_request_filter_ops_action_type import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, +) +from .audit_service_list_audit_events_request_filter_ops_context_access_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextAccessType, +) +from .audit_service_list_audit_events_request_filter_ops_context_actor_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextActorType, +) +from .audit_service_list_audit_events_request_filter_ops_context_auth_mode import ( + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, +) +from .audit_service_list_audit_events_request_filter_ops_resource_type import ( + AuditServiceListAuditEventsRequestFilterOpsResourceType, +) +from .audit_service_list_audit_events_request_sort_ops_order_by import AuditServiceListAuditEventsRequestSortOpsOrderBy + +__all__ = [ + "AuditServiceListAuditEventsRequestFilterOpsActionType", + "AuditServiceListAuditEventsRequestFilterOpsContextAccessType", + "AuditServiceListAuditEventsRequestFilterOpsContextActorType", + "AuditServiceListAuditEventsRequestFilterOpsContextAuthMode", + "AuditServiceListAuditEventsRequestFilterOpsResourceType", + "AuditServiceListAuditEventsRequestSortOpsOrderBy", +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_action_type.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_action_type.py new file mode 100644 index 00000000..24df22e1 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_action_type.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditServiceListAuditEventsRequestFilterOpsActionType = typing.Union[ + typing.Literal[ + "NONE", + "ASSIGN", + "CREATE", + "DELETE", + "EXECUTE", + "LIST", + "READ", + "UNASSIGN", + "UPDATE", + "VALIDATE", + "LOGIN", + "ROTATE", + "SCHEDULEROTATION", + "SCHEDULEROTATIONALERT", + "IMPORT", + "GETIMPORTPARAMETERS", + "PING", + "GETCLOUDPROVIDER", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_access_type.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_access_type.py new file mode 100644 index 00000000..1349c539 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_access_type.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditServiceListAuditEventsRequestFilterOpsContextAccessType = typing.Union[ + typing.Literal["ACCESS_NONE", "API", "SQL", "OKTA_LOGIN"], typing.Any +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_actor_type.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_actor_type.py new file mode 100644 index 00000000..4a5a96f1 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_actor_type.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +AuditServiceListAuditEventsRequestFilterOpsContextActorType = typing.Union[ + typing.Literal["NONE", "USER", "SERVICE_ACCOUNT"], typing.Any +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_auth_mode.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_auth_mode.py new file mode 100644 index 00000000..f542f677 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_context_auth_mode.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditServiceListAuditEventsRequestFilterOpsContextAuthMode = typing.Union[ + typing.Literal["AUTH_NONE", "OKTA_JWT", "SERVICE_ACCOUNT_JWT", "PAT_JWT"], typing.Any +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_resource_type.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_resource_type.py new file mode 100644 index 00000000..610aa1e6 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_filter_ops_resource_type.py @@ -0,0 +1,39 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditServiceListAuditEventsRequestFilterOpsResourceType = typing.Union[ + typing.Literal[ + "NONE_API", + "ACCOUNT", + "AUDIT", + "BASE_DATA_TYPE", + "FIELD_TEMPLATE", + "FILE", + "KEY", + "POLICY", + "PROTO_PARSE", + "RECORD", + "ROLE", + "RULE", + "SECRET", + "SERVICE_ACCOUNT", + "TOKEN", + "USER", + "VAULT", + "VAULT_TEMPLATE", + "WORKSPACE", + "TABLE", + "POLICY_TEMPLATE", + "MEMBER", + "TAG", + "CONNECTION", + "MIGRATION", + "SCHEDULED_JOB", + "JOB", + "COLUMN_NAME", + "NETWORK_TOKEN", + "SUBSCRIPTION", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_sort_ops_order_by.py b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_sort_ops_order_by.py new file mode 100644 index 00000000..48a79484 --- /dev/null +++ b/skyflow/generated/rest/audit/types/audit_service_list_audit_events_request_sort_ops_order_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditServiceListAuditEventsRequestSortOpsOrderBy = typing.Union[typing.Literal["ASCENDING", "DESCENDING"], typing.Any] diff --git a/skyflow/generated/rest/authentication/__init__.py b/skyflow/generated/rest/authentication/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/skyflow/generated/rest/authentication/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/skyflow/generated/rest/authentication/client.py b/skyflow/generated/rest/authentication/client.py new file mode 100644 index 00000000..c4825e27 --- /dev/null +++ b/skyflow/generated/rest/authentication/client.py @@ -0,0 +1,264 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.v_1_get_auth_token_response import V1GetAuthTokenResponse +from ..core.pydantic_utilities import parse_obj_as +from ..errors.bad_request_error import BadRequestError +from ..errors.unauthorized_error import UnauthorizedError +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class AuthenticationClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def authentication_service_get_auth_token( + self, + *, + grant_type: str, + assertion: str, + subject_token: typing.Optional[str] = OMIT, + subject_token_type: typing.Optional[str] = OMIT, + requested_token_use: typing.Optional[str] = OMIT, + scope: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetAuthTokenResponse: + """ +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + Parameters + ---------- + grant_type : str + Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`. + + assertion : str + User-signed JWT token that contains the following fields:
  • iss: Issuer of the JWT.
  • key: Unique identifier for the key.
  • aud: Recipient the JWT is intended for.
  • exp: Time the JWT expires.
  • sub: Subject of the JWT.
  • ctx: (Optional) Value for Context-aware authorization.
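As a rough illustration of assembling such an assertion (this is not the SDK's own helper; the `skyflow.service_account` module provides token-generation utilities for real use), the sketch below uses PyJWT. Every credential value is a placeholder that would normally come from a service account credentials file:

    import time

    import jwt  # PyJWT, with the `cryptography` extra installed for RS256

    client_id = "<CLIENT_ID>"      # placeholder
    key_id = "<KEY_ID>"            # placeholder
    token_uri = "<TOKEN_URI>"      # placeholder
    private_key = open("private-key.pem").read()  # placeholder path

    claims = {
        "iss": client_id,                # issuer of the JWT
        "key": key_id,                   # unique identifier for the key
        "aud": token_uri,                # recipient the JWT is intended for
        "exp": int(time.time()) + 3600,  # expiry, one hour from now
        "sub": client_id,                # subject of the JWT
        # "ctx": "<CONTEXT_VALUE>",      # optional: context-aware authorization
    }

    assertion = jwt.encode(claims, private_key, algorithm="RS256")

The resulting string is what the `assertion` argument in the example below expects.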
+ + subject_token : typing.Optional[str] + Subject token. + + subject_token_type : typing.Optional[str] + Subject token type. + + requested_token_use : typing.Optional[str] + Token use type. Either `delegation` or `impersonation`. + + scope : typing.Optional[str] + Subset of available roles to associate with the requested token. Uses the format "role:\ role:\". + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1GetAuthTokenResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.authentication.authentication_service_get_auth_token( + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "v1/auth/sa/oauth/token", + method="POST", + json={ + "grant_type": grant_type, + "assertion": assertion, + "subject_token": subject_token, + "subject_token_type": subject_token_type, + "requested_token_use": requested_token_use, + "scope": scope, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetAuthTokenResponse, + parse_obj_as( + type_=V1GetAuthTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncAuthenticationClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def authentication_service_get_auth_token( + self, + *, + grant_type: str, + assertion: str, + subject_token: typing.Optional[str] = OMIT, + subject_token_type: typing.Optional[str] = OMIT, + requested_token_use: typing.Optional[str] = OMIT, + scope: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetAuthTokenResponse: + """ +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + Parameters + ---------- + grant_type : str + Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`. + + assertion : str + User-signed JWT token that contains the following fields:
  • iss: Issuer of the JWT.
  • key: Unique identifier for the key.
  • aud: Recipient the JWT is intended for.
  • exp: Time the JWT expires.
  • sub: Subject of the JWT.
  • ctx: (Optional) Value for Context-aware authorization.
+ + subject_token : typing.Optional[str] + Subject token. + + subject_token_type : typing.Optional[str] + Subject token type. + + requested_token_use : typing.Optional[str] + Token use type. Either `delegation` or `impersonation`. + + scope : typing.Optional[str] + Subset of available roles to associate with the requested token. Uses the format "role:\ role:\". + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1GetAuthTokenResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.authentication.authentication_service_get_auth_token( + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/auth/sa/oauth/token", + method="POST", + json={ + "grant_type": grant_type, + "assertion": assertion, + "subject_token": subject_token, + "subject_token_type": subject_token_type, + "requested_token_use": requested_token_use, + "scope": scope, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetAuthTokenResponse, + parse_obj_as( + type_=V1GetAuthTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/bin_lookup/__init__.py b/skyflow/generated/rest/bin_lookup/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/skyflow/generated/rest/bin_lookup/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/skyflow/generated/rest/bin_lookup/client.py b/skyflow/generated/rest/bin_lookup/client.py new file mode 100644 index 00000000..58d30c51 --- /dev/null +++ b/skyflow/generated/rest/bin_lookup/client.py @@ -0,0 +1,204 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from ..core.client_wrapper import SyncClientWrapper +from ..types.v_1_vault_schema_config import V1VaultSchemaConfig +from ..core.request_options import RequestOptions +from ..types.v_1_bin_list_response import V1BinListResponse +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.pydantic_utilities import parse_obj_as +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class BinLookupClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def bin_list_service_list_cards_of_bin( + self, + *, + fields: typing.Optional[typing.Sequence[str]] = OMIT, + bin: typing.Optional[str] = OMIT, + vault_schema_config: typing.Optional[V1VaultSchemaConfig] = OMIT, + skyflow_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BinListResponse: + """ + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + Parameters + ---------- + fields : typing.Optional[typing.Sequence[str]] + Fields to return. If not specified, all fields are returned. + + bin : typing.Optional[str] + BIN of the card. + + vault_schema_config : typing.Optional[V1VaultSchemaConfig] + + skyflow_id : typing.Optional[str] + skyflow_id of the record. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BinListResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.bin_lookup.bin_list_service_list_cards_of_bin( + bin="012345", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "v1/card_lookup", + method="POST", + json={ + "fields": fields, + "BIN": bin, + "vault_schema_config": convert_and_respect_annotation_metadata( + object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" + ), + "skyflow_id": skyflow_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BinListResponse, + parse_obj_as( + type_=V1BinListResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncBinLookupClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def bin_list_service_list_cards_of_bin( + self, + *, + fields: typing.Optional[typing.Sequence[str]] = OMIT, + bin: typing.Optional[str] = OMIT, + vault_schema_config: typing.Optional[V1VaultSchemaConfig] = OMIT, + skyflow_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BinListResponse: + """ + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + Parameters + ---------- + fields : typing.Optional[typing.Sequence[str]] + Fields to return. If not specified, all fields are returned. + + bin : typing.Optional[str] + BIN of the card. + + vault_schema_config : typing.Optional[V1VaultSchemaConfig] + + skyflow_id : typing.Optional[str] + skyflow_id of the record. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BinListResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.bin_lookup.bin_list_service_list_cards_of_bin( + bin="012345", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/card_lookup", + method="POST", + json={ + "fields": fields, + "BIN": bin, + "vault_schema_config": convert_and_respect_annotation_metadata( + object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" + ), + "skyflow_id": skyflow_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BinListResponse, + parse_obj_as( + type_=V1BinListResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/client.py b/skyflow/generated/rest/client.py new file mode 100644 index 00000000..7064d444 --- /dev/null +++ b/skyflow/generated/rest/client.py @@ -0,0 +1,160 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from .environment import SkyflowEnvironment +import httpx +from .core.client_wrapper import SyncClientWrapper +from .audit.client import AuditClient +from .bin_lookup.client import BinLookupClient +from .records.client import RecordsClient +from .tokens.client import TokensClient +from .query.client import QueryClient +from .authentication.client import AuthenticationClient +from .core.client_wrapper import AsyncClientWrapper +from .audit.client import AsyncAuditClient +from .bin_lookup.client import AsyncBinLookupClient +from .records.client import AsyncRecordsClient +from .tokens.client import AsyncTokensClient +from .query.client import AsyncQueryClient +from .authentication.client import AsyncAuthenticationClient + + +class Skyflow: + """ + Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions. + + Parameters + ---------- + base_url : typing.Optional[str] + The base url to use for requests from the client. + + environment : SkyflowEnvironment + The environment to use for requests from the client. from .environment import SkyflowEnvironment + + + + Defaults to SkyflowEnvironment.PRODUCTION + + + + token : typing.Union[str, typing.Callable[[], str]] + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests. 
By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. + + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. + + httpx_client : typing.Optional[httpx.Client] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + """ + + def __init__( + self, + *, + base_url: typing.Optional[str] = None, + environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, + token: typing.Union[str, typing.Callable[[], str]], + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, + httpx_client: typing.Optional[httpx.Client] = None, + ): + _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + self._client_wrapper = SyncClientWrapper( + base_url=_get_base_url(base_url=base_url, environment=environment), + token=token, + httpx_client=httpx_client + if httpx_client is not None + else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.Client(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, + ) + self.audit = AuditClient(client_wrapper=self._client_wrapper) + self.bin_lookup = BinLookupClient(client_wrapper=self._client_wrapper) + self.records = RecordsClient(client_wrapper=self._client_wrapper) + self.tokens = TokensClient(client_wrapper=self._client_wrapper) + self.query = QueryClient(client_wrapper=self._client_wrapper) + self.authentication = AuthenticationClient(client_wrapper=self._client_wrapper) + + +class AsyncSkyflow: + """ + Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions. + + Parameters + ---------- + base_url : typing.Optional[str] + The base url to use for requests from the client. + + environment : SkyflowEnvironment + The environment to use for requests from the client. from .environment import SkyflowEnvironment + + + + Defaults to SkyflowEnvironment.PRODUCTION + + + + token : typing.Union[str, typing.Callable[[], str]] + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. + + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. + + httpx_client : typing.Optional[httpx.AsyncClient] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. 
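Because a custom httpx client bypasses the default 60-second timeout, and because `token` may be a callable that is invoked each time request headers are built, a configuration along these lines is possible (a sketch; the token provider below is a placeholder):

    import httpx

    from skyflow import AsyncSkyflow


    def fetch_token() -> str:
        # Placeholder: return a freshly generated bearer token here.
        return "YOUR_TOKEN"


    client = AsyncSkyflow(
        token=fetch_token,  # called whenever the Authorization header is built
        httpx_client=httpx.AsyncClient(timeout=30.0, follow_redirects=True),
    )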
+ + Examples + -------- + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + """ + + def __init__( + self, + *, + base_url: typing.Optional[str] = None, + environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, + token: typing.Union[str, typing.Callable[[], str]], + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, + httpx_client: typing.Optional[httpx.AsyncClient] = None, + ): + _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + self._client_wrapper = AsyncClientWrapper( + base_url=_get_base_url(base_url=base_url, environment=environment), + token=token, + httpx_client=httpx_client + if httpx_client is not None + else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.AsyncClient(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, + ) + self.audit = AsyncAuditClient(client_wrapper=self._client_wrapper) + self.bin_lookup = AsyncBinLookupClient(client_wrapper=self._client_wrapper) + self.records = AsyncRecordsClient(client_wrapper=self._client_wrapper) + self.tokens = AsyncTokensClient(client_wrapper=self._client_wrapper) + self.query = AsyncQueryClient(client_wrapper=self._client_wrapper) + self.authentication = AsyncAuthenticationClient(client_wrapper=self._client_wrapper) + + +def _get_base_url(*, base_url: typing.Optional[str] = None, environment: SkyflowEnvironment) -> str: + if base_url is not None: + return base_url + elif environment is not None: + return environment.value + else: + raise Exception("Please pass in either base_url or environment to construct the client") diff --git a/skyflow/generated/rest/configuration.py b/skyflow/generated/rest/configuration.py deleted file mode 100644 index 5d983650..00000000 --- a/skyflow/generated/rest/configuration.py +++ /dev/null @@ -1,464 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
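For example (a minimal sketch; the vault identifier, account ID, and bearer token are placeholders), the header can be attached to a direct HTTP call:

    import httpx

    response = httpx.get(
        "https://<identifier>.vault.skyflowapis.com/v1/audit/events",
        params={"filterOps.accountID": "<ACCOUNT_ID>"},
        headers={"Authorization": "Bearer <BEARER_TOKEN>"},
    )
    print(response.status_code)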
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -import copy -import logging -from logging import FileHandler -import multiprocessing -import sys -from typing import Optional -import urllib3 - -import http.client as httplib - -JSON_SCHEMA_VALIDATION_KEYWORDS = { - 'multipleOf', 'maximum', 'exclusiveMaximum', - 'minimum', 'exclusiveMinimum', 'maxLength', - 'minLength', 'pattern', 'maxItems', 'minItems' -} - -class Configuration: - """This class contains various settings of the API client. - - :param host: Base url. - :param ignore_operation_servers - Boolean to ignore operation servers for the API client. - Config will use `host` as the base url regardless of the operation servers. - :param api_key: Dict to store API key(s). - Each entry in the dict specifies an API key. - The dict key is the name of the security scheme in the OAS specification. - The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer). - The dict key is the name of the security scheme in the OAS specification. - The dict value is an API key prefix when generating the auth data. - :param username: Username for HTTP basic authentication. - :param password: Password for HTTP basic authentication. - :param access_token: Access token. - :param server_index: Index to servers configuration. - :param server_variables: Mapping with string values to replace variables in - templated server configuration. The validation of enums is performed for - variables with defined enum values before. - :param server_operation_index: Mapping from operation ID to an index to server - configuration. - :param server_operation_variables: Mapping from operation ID to a mapping with - string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum - values before. - :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format. - :param retries: Number of retries for API requests. - - :Example: - """ - - _default = None - - def __init__(self, host=None, - api_key=None, api_key_prefix=None, - username=None, password=None, - access_token=None, - server_index=None, server_variables=None, - server_operation_index=None, server_operation_variables=None, - ignore_operation_servers=False, - ssl_ca_cert=None, - retries=None, - *, - debug: Optional[bool] = None - ) -> None: - """Constructor - """ - self._base_path = "https://identifier.vault.skyflowapis.com" if host is None else host - """Default Base url - """ - self.server_index = 0 if server_index is None and host is None else server_index - self.server_operation_index = server_operation_index or {} - """Default server index - """ - self.server_variables = server_variables or {} - self.server_operation_variables = server_operation_variables or {} - """Default server variables - """ - self.ignore_operation_servers = ignore_operation_servers - """Ignore operation servers - """ - self.temp_folder_path = None - """Temp file folder for downloading files - """ - # Authentication Settings - self.api_key = {} - if api_key: - self.api_key = api_key - """dict to store API key(s) - """ - self.api_key_prefix = {} - if api_key_prefix: - self.api_key_prefix = api_key_prefix - """dict to store API prefix (e.g. 
Bearer) - """ - self.refresh_api_key_hook = None - """function hook to refresh API key if expired - """ - self.username = username - """Username for HTTP basic authentication - """ - self.password = password - """Password for HTTP basic authentication - """ - self.access_token = access_token - """Access token - """ - self.logger = {} - """Logging Settings - """ - self.logger["package_logger"] = logging.getLogger("skyflow.generated.rest") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - self.logger_format = '%(asctime)s %(levelname)s %(message)s' - """Log format - """ - self.logger_stream_handler = None - """Log stream handler - """ - self.logger_file_handler: Optional[FileHandler] = None - """Log file handler - """ - self.logger_file = None - """Debug file location - """ - if debug is not None: - self.debug = debug - else: - self.__debug = False - """Debug switch - """ - - self.verify_ssl = True - """SSL/TLS verification - Set this to false to skip verifying SSL certificate when calling API - from https server. - """ - self.ssl_ca_cert = ssl_ca_cert - """Set this to customize the certificate file to verify the peer. - """ - self.cert_file = None - """client certificate file - """ - self.key_file = None - """client key file - """ - self.assert_hostname = None - """Set this to True/False to enable/disable SSL hostname verification. - """ - self.tls_server_name = None - """SSL/TLS Server Name Indication (SNI) - Set this to the SNI value expected by the server. - """ - - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - """urllib3 connection pool's maximum number of connections saved - per pool. urllib3 uses 1 connection as default value, but this is - not the best value when you are making a lot of possibly parallel - requests to the same host, which is often the case here. - cpu_count * 5 is used as default value to increase performance. - """ - - self.proxy: Optional[str] = None - """Proxy URL - """ - self.proxy_headers = None - """Proxy headers - """ - self.safe_chars_for_path_param = '' - """Safe chars for path_param - """ - self.retries = retries - """Adding retries to override urllib3 default value 3 - """ - # Enable client side validation - self.client_side_validation = True - - self.socket_options = None - """Options to pass down to the underlying urllib3 socket - """ - - self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" - """datetime format - """ - - self.date_format = "%Y-%m-%d" - """date format - """ - - def __deepcopy__(self, memo): - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - if k not in ('logger', 'logger_file_handler'): - setattr(result, k, copy.deepcopy(v, memo)) - # shallow copy of loggers - result.logger = copy.copy(self.logger) - # use setters to configure loggers - result.logger_file = self.logger_file - result.debug = self.debug - return result - - def __setattr__(self, name, value): - object.__setattr__(self, name, value) - - @classmethod - def set_default(cls, default): - """Set default instance of configuration. - - It stores default configuration, which can be - returned by get_default_copy method. - - :param default: object of Configuration - """ - cls._default = default - - @classmethod - def get_default_copy(cls): - """Deprecated. Please use `get_default` instead. - - Deprecated. Please use `get_default` instead. - - :return: The configuration object. - """ - return cls.get_default() - - @classmethod - def get_default(cls): - """Return the default configuration. 
- - This method returns newly created, based on default constructor, - object of Configuration class or returns a copy of default - configuration. - - :return: The configuration object. - """ - if cls._default is None: - cls._default = Configuration() - return cls._default - - @property - def logger_file(self): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. - self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in self.logger.items(): - logger.addHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in self.logger.items(): - logger.setLevel(logging.DEBUG) - # turn on httplib debug - httplib.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in self.logger.items(): - logger.setLevel(logging.WARNING) - # turn off httplib debug - httplib.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier, alias=None): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :param alias: The alternative identifier of apiKey. - :return: The token for api key authentication. - """ - if self.refresh_api_key_hook is not None: - self.refresh_api_key_hook(self) - key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) - if key: - prefix = self.api_key_prefix.get(identifier) - if prefix: - return "%s %s" % (prefix, key) - else: - return key - - def get_basic_auth_token(self): - """Gets HTTP basic authentication header (string). - - :return: The token for basic HTTP authentication. - """ - username = "" - if self.username is not None: - username = self.username - password = "" - if self.password is not None: - password = self.password - return urllib3.util.make_headers( - basic_auth=username + ':' + password - ).get('authorization') - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. 
- """ - auth = {} - if self.access_token is not None: - auth['Bearer'] = { - 'type': 'bearer', - 'in': 'header', - 'format': 'JWT', - 'key': 'Authorization', - 'value': 'Bearer ' + self.access_token - } - return auth - - def to_debug_report(self): - """Gets the essential information for debugging. - - :return: The report for debugging. - """ - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: v1\n"\ - "SDK Package Version: 1.0.0".\ - format(env=sys.platform, pyversion=sys.version) - - def get_host_settings(self): - """Gets an array of host settings - - :return: An array of host settings - """ - return [ - { - 'url': "https://identifier.vault.skyflowapis.com", - 'description': "Production", - }, - { - 'url': "https://identifier.vault.skyflowapis-preview.com", - 'description': "Sandbox", - } - ] - - def get_host_from_settings(self, index, variables=None, servers=None): - """Gets host URL based on the index and variables - :param index: array index of the host settings - :param variables: hash of variable and the corresponding value - :param servers: an array of host settings or None - :return: URL based on host settings - """ - if index is None: - return self._base_path - - variables = {} if variables is None else variables - servers = self.get_host_settings() if servers is None else servers - - try: - server = servers[index] - except IndexError: - raise ValueError( - "Invalid index {0} when selecting the host settings. " - "Must be less than {1}".format(index, len(servers))) - - url = server['url'] - - # go through variables and replace placeholders - for variable_name, variable in server.get('variables', {}).items(): - used_value = variables.get( - variable_name, variable['default_value']) - - if 'enum_values' in variable \ - and used_value not in variable['enum_values']: - raise ValueError( - "The variable `{0}` in the host URL has invalid value " - "{1}. Must be {2}.".format( - variable_name, variables[variable_name], - variable['enum_values'])) - - url = url.replace("{" + variable_name + "}", used_value) - - return url - - @property - def host(self): - """Return generated host.""" - return self.get_host_from_settings(self.server_index, variables=self.server_variables) - - @host.setter - def host(self, value): - """Fix base path.""" - self._base_path = value - self.server_index = None diff --git a/skyflow/generated/rest/core/__init__.py b/skyflow/generated/rest/core/__init__.py new file mode 100644 index 00000000..f03aecbf --- /dev/null +++ b/skyflow/generated/rest/core/__init__.py @@ -0,0 +1,47 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from .api_error import ApiError +from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper +from .datetime_utils import serialize_datetime +from .file import File, convert_file_dict_to_httpx_tuples, with_content_type +from .http_client import AsyncHttpClient, HttpClient +from .jsonable_encoder import jsonable_encoder +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, +) +from .query_encoder import encode_query +from .remove_none_from_dict import remove_none_from_dict +from .request_options import RequestOptions +from .serialization import FieldMetadata, convert_and_respect_annotation_metadata + +__all__ = [ + "ApiError", + "AsyncClientWrapper", + "AsyncHttpClient", + "BaseClientWrapper", + "FieldMetadata", + "File", + "HttpClient", + "IS_PYDANTIC_V2", + "RequestOptions", + "SyncClientWrapper", + "UniversalBaseModel", + "UniversalRootModel", + "convert_and_respect_annotation_metadata", + "convert_file_dict_to_httpx_tuples", + "encode_query", + "jsonable_encoder", + "parse_obj_as", + "remove_none_from_dict", + "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", + "with_content_type", +] diff --git a/skyflow/generated/rest/core/api_error.py b/skyflow/generated/rest/core/api_error.py new file mode 100644 index 00000000..2e9fc543 --- /dev/null +++ b/skyflow/generated/rest/core/api_error.py @@ -0,0 +1,15 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + + +class ApiError(Exception): + status_code: typing.Optional[int] + body: typing.Any + + def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): + self.status_code = status_code + self.body = body + + def __str__(self) -> str: + return f"status_code: {self.status_code}, body: {self.body}" diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py new file mode 100644 index 00000000..7177cf7c --- /dev/null +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -0,0 +1,76 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +import httpx +from .http_client import HttpClient +from .http_client import AsyncHttpClient + + +class BaseClientWrapper: + def __init__( + self, + *, + token: typing.Union[str, typing.Callable[[], str]], + base_url: str, + timeout: typing.Optional[float] = None, + ): + self._token = token + self._base_url = base_url + self._timeout = timeout + + def get_headers(self) -> typing.Dict[str, str]: + headers: typing.Dict[str, str] = { + "X-Fern-Language": "Python", + "X-Fern-SDK-Name": "skyflow", + "X-Fern-SDK-Version": "1.15.2", + } + headers["Authorization"] = f"Bearer {self._get_token()}" + return headers + + def _get_token(self) -> str: + if isinstance(self._token, str): + return self._token + else: + return self._token() + + def get_base_url(self) -> str: + return self._base_url + + def get_timeout(self) -> typing.Optional[float]: + return self._timeout + + +class SyncClientWrapper(BaseClientWrapper): + def __init__( + self, + *, + token: typing.Union[str, typing.Callable[[], str]], + base_url: str, + timeout: typing.Optional[float] = None, + httpx_client: httpx.Client, + ): + super().__init__(token=token, base_url=base_url, timeout=timeout) + self.httpx_client = HttpClient( + httpx_client=httpx_client, + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, + ) + + +class AsyncClientWrapper(BaseClientWrapper): + def __init__( + self, + *, + token: typing.Union[str, typing.Callable[[], str]], + base_url: str, + timeout: typing.Optional[float] = None, + httpx_client: httpx.AsyncClient, + ): + super().__init__(token=token, base_url=base_url, timeout=timeout) + self.httpx_client = AsyncHttpClient( + httpx_client=httpx_client, + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, + ) diff --git a/skyflow/generated/rest/core/datetime_utils.py b/skyflow/generated/rest/core/datetime_utils.py new file mode 100644 index 00000000..7c9864a9 --- /dev/null +++ b/skyflow/generated/rest/core/datetime_utils.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt + + +def serialize_datetime(v: dt.datetime) -> str: + """ + Serialize a datetime including timezone info. + + Uses the timezone info provided if present, otherwise uses the current runtime's timezone info. + + UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00. + """ + + def _serialize_zoned_datetime(v: dt.datetime) -> str: + if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None): + # UTC is a special case where we use "Z" at the end instead of "+00:00" + return v.isoformat().replace("+00:00", "Z") + else: + # Delegate to the typical +/- offset format + return v.isoformat() + + if v.tzinfo is not None: + return _serialize_zoned_datetime(v) + else: + local_tz = dt.datetime.now().astimezone().tzinfo + localized_dt = v.replace(tzinfo=local_tz) + return _serialize_zoned_datetime(localized_dt) diff --git a/skyflow/generated/rest/core/file.py b/skyflow/generated/rest/core/file.py new file mode 100644 index 00000000..44b0d27c --- /dev/null +++ b/skyflow/generated/rest/core/file.py @@ -0,0 +1,67 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast + +# File typing inspired by the flexibility of types within the httpx library +# https://github.com/encode/httpx/blob/master/httpx/_types.py +FileContent = Union[IO[bytes], bytes, str] +File = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[ + Optional[str], + FileContent, + Optional[str], + Mapping[str, str], + ], +] + + +def convert_file_dict_to_httpx_tuples( + d: Dict[str, Union[File, List[File]]], +) -> List[Tuple[str, File]]: + """ + The format we use is a list of tuples, where the first element is the + name of the file and the second is the file object. Typically HTTPX wants + a dict, but to be able to send lists of files, you have to use the list + approach (which also works for non-lists) + https://github.com/encode/httpx/pull/1032 + """ + + httpx_tuples = [] + for key, file_like in d.items(): + if isinstance(file_like, list): + for file_like_item in file_like: + httpx_tuples.append((key, file_like_item)) + else: + httpx_tuples.append((key, file_like)) + return httpx_tuples + + +def with_content_type(*, file: File, default_content_type: str) -> File: + """ + This function resolves to the file's content type, if provided, and defaults + to the default_content_type value if not. + """ + if isinstance(file, tuple): + if len(file) == 2: + filename, content = cast(Tuple[Optional[str], FileContent], file) # type: ignore + return (filename, content, default_content_type) + elif len(file) == 3: + filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file) # type: ignore + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type) + elif len(file) == 4: + filename, content, file_content_type, headers = cast( # type: ignore + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file + ) + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type, headers) + else: + raise ValueError(f"Unexpected tuple length: {len(file)}") + return (None, file, default_content_type) diff --git a/skyflow/generated/rest/core/http_client.py b/skyflow/generated/rest/core/http_client.py new file mode 100644 index 00000000..275a54cc --- /dev/null +++ b/skyflow/generated/rest/core/http_client.py @@ -0,0 +1,499 @@ +# This file was auto-generated by Fern from our API Definition. + +import asyncio +import email.utils +import json +import re +import time +import typing +import urllib.parse +from contextlib import asynccontextmanager, contextmanager +from random import random + +import httpx + +from .file import File, convert_file_dict_to_httpx_tuples +from .jsonable_encoder import jsonable_encoder +from .query_encoder import encode_query +from .remove_none_from_dict import remove_none_from_dict +from .request_options import RequestOptions + +INITIAL_RETRY_DELAY_SECONDS = 0.5 +MAX_RETRY_DELAY_SECONDS = 10 +MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30 + + +def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]: + """ + This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait. + + Inspired by the urllib3 retry implementation. 
+ """ + retry_after_ms = response_headers.get("retry-after-ms") + if retry_after_ms is not None: + try: + return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0 + except Exception: + pass + + retry_after = response_headers.get("retry-after") + if retry_after is None: + return None + + # Attempt to parse the header as an int. + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = float(retry_after) + # Fallback to parsing it as a date. + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + return None + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + +def _retry_timeout(response: httpx.Response, retries: int) -> float: + """ + Determine the amount of time to wait before retrying a request. + This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff + with a jitter to determine the number of seconds to wait. + """ + + # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. + retry_after = _parse_retry_after(response.headers) + if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER: + return retry_after + + # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS. + retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS) + + # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries. 
+ timeout = retry_delay * (1 - 0.25 * random()) + return timeout if timeout >= 0 else 0 + + +def _should_retry(response: httpx.Response) -> bool: + retryable_400s = [429, 408, 409] + return response.status_code >= 500 or response.status_code in retryable_400s + + +def remove_omit_from_dict( + original: typing.Dict[str, typing.Optional[typing.Any]], + omit: typing.Optional[typing.Any], +) -> typing.Dict[str, typing.Any]: + if omit is None: + return original + new: typing.Dict[str, typing.Any] = {} + for key, value in original.items(): + if value is not omit: + new[key] = value + return new + + +def maybe_filter_request_body( + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Optional[typing.Any]: + if data is None: + return ( + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + if request_options is not None + else None + ) + elif not isinstance(data, typing.Mapping): + data_content = jsonable_encoder(data) + else: + data_content = { + **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore + **( + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + if request_options is not None + else {} + ), + } + return data_content + + +# Abstracted out for testing purposes +def get_request_body( + *, + json: typing.Optional[typing.Any], + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]: + json_body = None + data_body = None + if data is not None: + data_body = maybe_filter_request_body(data, request_options, omit) + else: + # If both data and json are None, we send json data in the event extra properties are specified + json_body = maybe_filter_request_body(json, request_options, omit) + + # If you have an empty JSON body, you should just send None + return (json_body if json_body != {} else None), data_body if data_body != {} else None + + +class HttpClient: + def __init__( + self, + *, + httpx_client: httpx.Client, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, + ): + self.base_url = base_url + self.base_timeout = base_timeout + self.base_headers = base_headers + self.httpx_client = httpx_client + + def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + + if base_url is None: + raise ValueError("A base_url is required to make this request, please provide one and try again.") + return base_url + + def request( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 2, + omit: typing.Optional[typing.Any] = None, + ) -> httpx.Response: + base_url = self.get_base_url(base_url) + timeout = ( 
+ request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + response = self.httpx_client.request( + method=method, + url=urllib.parse.urljoin(f"{base_url}/", path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + } + ) + ), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) or {} + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ), + json=json_body, + data=data_body, + content=content, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), + timeout=timeout, + ) + + max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0 + if _should_retry(response=response): + if max_retries > retries: + time.sleep(_retry_timeout(response=response, retries=retries)) + return self.request( + path=path, + method=method, + base_url=base_url, + params=params, + json=json, + content=content, + files=files, + headers=headers, + request_options=request_options, + retries=retries + 1, + omit=omit, + ) + + return response + + @contextmanager + def stream( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 2, + omit: typing.Optional[typing.Any] = None, + ) -> typing.Iterator[httpx.Response]: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + with self.httpx_client.stream( + method=method, + url=urllib.parse.urljoin(f"{base_url}/", path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) if request_options is not None else {}), + } + ) + ), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ), + json=json_body, + data=data_body, + content=content, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), + 
timeout=timeout, + ) as stream: + yield stream + + +class AsyncHttpClient: + def __init__( + self, + *, + httpx_client: httpx.AsyncClient, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, + ): + self.base_url = base_url + self.base_timeout = base_timeout + self.base_headers = base_headers + self.httpx_client = httpx_client + + def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + + if base_url is None: + raise ValueError("A base_url is required to make this request, please provide one and try again.") + return base_url + + async def request( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 2, + omit: typing.Optional[typing.Any] = None, + ) -> httpx.Response: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + # Add the input to each of these and do None-safety checks + response = await self.httpx_client.request( + method=method, + url=urllib.parse.urljoin(f"{base_url}/", path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + } + ) + ), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) or {} + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ), + json=json_body, + data=data_body, + content=content, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), + timeout=timeout, + ) + + max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0 + if _should_retry(response=response): + if max_retries > retries: + await asyncio.sleep(_retry_timeout(response=response, retries=retries)) + return await self.request( + path=path, + method=method, + base_url=base_url, + params=params, + json=json, + content=content, + files=files, + headers=headers, + request_options=request_options, + retries=retries + 1, + omit=omit, + ) + return response + + @asynccontextmanager + async def stream( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + 
content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 2, + omit: typing.Optional[typing.Any] = None, + ) -> typing.AsyncIterator[httpx.Response]: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + async with self.httpx_client.stream( + method=method, + url=urllib.parse.urljoin(f"{base_url}/", path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) if request_options is not None else {}), + } + ) + ), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + }, + omit=omit, + ) + ) + ) + ), + json=json_body, + data=data_body, + content=content, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), + timeout=timeout, + ) as stream: + yield stream diff --git a/skyflow/generated/rest/core/jsonable_encoder.py b/skyflow/generated/rest/core/jsonable_encoder.py new file mode 100644 index 00000000..1b631e90 --- /dev/null +++ b/skyflow/generated/rest/core/jsonable_encoder.py @@ -0,0 +1,101 @@ +# This file was auto-generated by Fern from our API Definition. + +""" +jsonable_encoder converts a Python object to a JSON-friendly dict +(e.g. datetimes to strings, Pydantic models to dicts). 
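+
+A rough, illustrative sketch of the conversions (hedged; the values shown are
+examples, not output captured from this module):
+
+    jsonable_encoder({"when": dt.datetime(2024, 1, 1), "path": PurePath("a/b")})
+    # -> approximately {"when": "2024-01-01T00:00:00", "path": "a/b"}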
+ +Taken from FastAPI, and made a bit simpler +https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py +""" + +import base64 +import dataclasses +import datetime as dt +from enum import Enum +from pathlib import PurePath +from types import GeneratorType +from typing import Any, Callable, Dict, List, Optional, Set, Union + +import pydantic + +from .datetime_utils import serialize_datetime +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + encode_by_type, + to_jsonable_with_fallback, +) + +SetIntStr = Set[Union[int, str]] +DictIntStrAny = Dict[Union[int, str], Any] + + +def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: + custom_encoder = custom_encoder or {} + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if isinstance(obj, pydantic.BaseModel): + if IS_PYDANTIC_V2: + encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2 + else: + encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1 + if custom_encoder: + encoder.update(custom_encoder) + obj_dict = obj.dict(by_alias=True) + if "__root__" in obj_dict: + obj_dict = obj_dict["__root__"] + if "root" in obj_dict: + obj_dict = obj_dict["root"] + return jsonable_encoder(obj_dict, custom_encoder=encoder) + if dataclasses.is_dataclass(obj): + obj_dict = dataclasses.asdict(obj) # type: ignore + return jsonable_encoder(obj_dict, custom_encoder=custom_encoder) + if isinstance(obj, bytes): + return base64.b64encode(obj).decode("utf-8") + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dt.datetime): + return serialize_datetime(obj) + if isinstance(obj, dt.date): + return str(obj) + if isinstance(obj, dict): + encoded_dict = {} + allowed_keys = set(obj.keys()) + for key, value in obj.items(): + if key in allowed_keys: + encoded_key = jsonable_encoder(key, custom_encoder=custom_encoder) + encoded_value = jsonable_encoder(value, custom_encoder=custom_encoder) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): + encoded_list = [] + for item in obj: + encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) + return encoded_list + + def fallback_serializer(o: Any) -> Any: + attempt_encode = encode_by_type(o) + if attempt_encode is not None: + return attempt_encode + + try: + data = dict(o) + except Exception as e: + errors: List[Exception] = [] + errors.append(e) + try: + data = vars(o) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder(data, custom_encoder=custom_encoder) + + return to_jsonable_with_fallback(obj, fallback_serializer) diff --git a/skyflow/generated/rest/core/pydantic_utilities.py b/skyflow/generated/rest/core/pydantic_utilities.py new file mode 100644 index 00000000..ca1f4792 --- /dev/null +++ b/skyflow/generated/rest/core/pydantic_utilities.py @@ -0,0 +1,296 @@ +# This file was auto-generated by Fern from our API Definition. 
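+# Illustrative note (not part of the generated file): parse_obj_as below de-aliases its
+# input via convert_and_respect_annotation_metadata, then validates with
+# pydantic.TypeAdapter(...).validate_python on Pydantic v2 or pydantic.parse_obj_as on v1,
+# e.g. parse_obj_as(SomeModel, raw_dict) where SomeModel is a hypothetical model class.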
+ +# nopycln: file +import datetime as dt +import typing +from collections import defaultdict + +import typing_extensions + +import pydantic + +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata + +IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +if IS_PYDANTIC_V2: + # isort will try to reformat the comments on these imports, which breaks mypy + # isort: off + from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_date as parse_date, + ) + from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_datetime as parse_datetime, + ) + from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + ENCODERS_BY_TYPE as encoders_by_type, + ) + from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + get_args as get_args, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + get_origin as get_origin, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_literal_type as is_literal_type, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_union as is_union, + ) + from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 +else: + from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 + from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 + from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 + from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 + from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 + from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 + + # isort: on + + +T = typing.TypeVar("T") +Model = typing.TypeVar("Model", bound=pydantic.BaseModel) + + +def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: + dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") + if IS_PYDANTIC_V2: + adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + return adapter.validate_python(dealiased_object) + else: + return pydantic.parse_obj_as(type_, dealiased_object) + + +def to_jsonable_with_fallback( + obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] +) -> typing.Any: + if IS_PYDANTIC_V2: + from pydantic_core import to_jsonable_python + + return to_jsonable_python(obj, fallback=fallback_serializer) + else: + return fallback_serializer(obj) + + +class UniversalBaseModel(pydantic.BaseModel): + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + # Allow fields beginning with `model_` to be used in the model + protected_namespaces=(), + ) # type: ignore # Pydantic v2 + + @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2 + def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2 + serialized = handler(self) + data = {k: serialize_datetime(v) 
if isinstance(v, dt.datetime) else v for k, v in serialized.items()} + return data + + else: + + class Config: + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} + + @classmethod + def model_construct( + cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any + ) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") + return cls.construct(_fields_set, **dealiased_object) + + @classmethod + def construct( + cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any + ) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") + if IS_PYDANTIC_V2: + return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2 + else: + return super().construct(_fields_set, **dealiased_object) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + if IS_PYDANTIC_V2: + return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 + else: + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + """ + Override the default dict method to `exclude_unset` by default. This function patches + `exclude_unset` to work include fields within non-None default values. + """ + # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2 + # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice. + # + # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models + # that we have less control over, and this is less intrusive than custom serializers for now. + if IS_PYDANTIC_V2: + kwargs_with_defaults_exclude_unset: typing.Any = { + **kwargs, + "by_alias": True, + "exclude_unset": True, + "exclude_none": False, + } + kwargs_with_defaults_exclude_none: typing.Any = { + **kwargs, + "by_alias": True, + "exclude_none": True, + "exclude_unset": False, + } + dict_dump = deep_union_pydantic_dicts( + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + ) + + else: + _fields_set = self.__fields_set__.copy() + + fields = _get_model_fields(self.__class__) + for name, field in fields.items(): + if name not in _fields_set: + default = _get_field_default(field) + + # If the default values are non-null act like they've been set + # This effectively allows exclude_unset to work like exclude_none where + # the latter passes through intentionally set none values. 
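+                    # Illustrative example (not generated): a field declared as `count: int = 0`
+                    # has a non-None default, so it is treated as set and survives the
+                    # exclude_unset dump even if the caller never assigned it.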
+ if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]): + _fields_set.add(name) + + if default is not None: + self.__fields_set__.add(name) + + kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { + "by_alias": True, + "exclude_unset": True, + "include": _fields_set, + **kwargs, + } + + dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields) + + return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write") + + +def _union_list_of_pydantic_dicts( + source: typing.List[typing.Any], destination: typing.List[typing.Any] +) -> typing.List[typing.Any]: + converted_list: typing.List[typing.Any] = [] + for i, item in enumerate(source): + destination_value = destination[i] # type: ignore + if isinstance(item, dict): + converted_list.append(deep_union_pydantic_dicts(item, destination_value)) + elif isinstance(item, list): + converted_list.append(_union_list_of_pydantic_dicts(item, destination_value)) + else: + converted_list.append(item) + return converted_list + + +def deep_union_pydantic_dicts( + source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] +) -> typing.Dict[str, typing.Any]: + for key, value in source.items(): + node = destination.setdefault(key, {}) + if isinstance(value, dict): + deep_union_pydantic_dicts(value, node) + # Note: we do not do this same processing for sets given we do not have sets of models + # and given the sets are unordered, the processing of the set and matching objects would + # be non-trivial. + elif isinstance(value, list): + destination[key] = _union_list_of_pydantic_dicts(value, node) + else: + destination[key] = value + + return destination + + +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + pass + + UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore +else: + UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + + +def encode_by_type(o: typing.Any) -> typing.Any: + encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( + defaultdict(tuple) + ) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = typing.Callable[..., typing.Any] + + +def universal_root_validator( + pre: bool = False, +) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + 
return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] + + +def _get_model_fields( + model: typing.Type["Model"], +) -> typing.Mapping[str, PydanticField]: + if IS_PYDANTIC_V2: + return model.model_fields # type: ignore # Pydantic v2 + else: + return model.__fields__ # type: ignore # Pydantic v1 + + +def _get_field_default(field: PydanticField) -> typing.Any: + try: + value = field.get_default() # type: ignore # Pydantic < v1.10.15 + except: + value = field.default + if IS_PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value diff --git a/skyflow/generated/rest/core/query_encoder.py b/skyflow/generated/rest/core/query_encoder.py new file mode 100644 index 00000000..3183001d --- /dev/null +++ b/skyflow/generated/rest/core/query_encoder.py @@ -0,0 +1,58 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Any, Dict, List, Optional, Tuple + +import pydantic + + +# Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict +def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]: + result = [] + for k, v in dict_flat.items(): + key = f"{key_prefix}[{k}]" if key_prefix is not None else k + if isinstance(v, dict): + result.extend(traverse_query_dict(v, key)) + elif isinstance(v, list): + for arr_v in v: + if isinstance(arr_v, dict): + result.extend(traverse_query_dict(arr_v, key)) + else: + result.append((key, arr_v)) + else: + result.append((key, v)) + return result + + +def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]: + if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict): + if isinstance(query_value, pydantic.BaseModel): + obj_dict = query_value.dict(by_alias=True) + else: + obj_dict = query_value + return traverse_query_dict(obj_dict, query_key) + elif isinstance(query_value, list): + encoded_values: List[Tuple[str, Any]] = [] + for value in query_value: + if isinstance(value, pydantic.BaseModel) or isinstance(value, dict): + if isinstance(value, pydantic.BaseModel): + obj_dict = value.dict(by_alias=True) + elif isinstance(value, dict): + obj_dict = value + + encoded_values.extend(single_query_encoder(query_key, obj_dict)) + else: + encoded_values.append((query_key, value)) + + return encoded_values + + return [(query_key, query_value)] + + +def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]: + if query is None: + return None + + encoded_query = [] + for k, v in query.items(): + encoded_query.extend(single_query_encoder(k, v)) + return encoded_query diff --git a/skyflow/generated/rest/core/remove_none_from_dict.py b/skyflow/generated/rest/core/remove_none_from_dict.py new file mode 100644 index 00000000..c2298143 --- /dev/null +++ b/skyflow/generated/rest/core/remove_none_from_dict.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. 
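+# Illustrative example (not part of the generated file):
+#     remove_none_from_dict({"a": 1, "b": None})  # -> {"a": 1}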
+ +from typing import Any, Dict, Mapping, Optional + + +def remove_none_from_dict(original: Mapping[str, Optional[Any]]) -> Dict[str, Any]: + new: Dict[str, Any] = {} + for key, value in original.items(): + if value is not None: + new[key] = value + return new diff --git a/skyflow/generated/rest/core/request_options.py b/skyflow/generated/rest/core/request_options.py new file mode 100644 index 00000000..1b388044 --- /dev/null +++ b/skyflow/generated/rest/core/request_options.py @@ -0,0 +1,35 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +try: + from typing import NotRequired # type: ignore +except ImportError: + from typing_extensions import NotRequired + + +class RequestOptions(typing.TypedDict, total=False): + """ + Additional options for request-specific configuration when calling APIs via the SDK. + This is used primarily as an optional final parameter for service functions. + + Attributes: + - timeout_in_seconds: int. The number of seconds to await an API call before timing out. + + - max_retries: int. The max number of retries to attempt if the API call fails. + + - additional_headers: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's header dict + + - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict + + - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict + + - chunk_size: int. The size, in bytes, to process each chunk of data being streamed back within the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads. + """ + + timeout_in_seconds: NotRequired[int] + max_retries: NotRequired[int] + additional_headers: NotRequired[typing.Dict[str, typing.Any]] + additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]] + additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]] + chunk_size: NotRequired[int] diff --git a/skyflow/generated/rest/core/serialization.py b/skyflow/generated/rest/core/serialization.py new file mode 100644 index 00000000..cb5dcbf9 --- /dev/null +++ b/skyflow/generated/rest/core/serialization.py @@ -0,0 +1,272 @@ +# This file was auto-generated by Fern from our API Definition. + +import collections +import inspect +import typing + +import typing_extensions + +import pydantic + + +class FieldMetadata: + """ + Metadata class used to annotate fields to provide additional information. + + Example: + class MyDict(TypedDict): + field: typing.Annotated[str, FieldMetadata(alias="field_name")] + + Will serialize: `{"field": "value"}` + To: `{"field_name": "value"}` + """ + + alias: str + + def __init__(self, *, alias: str) -> None: + self.alias = alias + + +def convert_and_respect_annotation_metadata( + *, + object_: typing.Any, + annotation: typing.Any, + inner_type: typing.Optional[typing.Any] = None, + direction: typing.Literal["read", "write"], +) -> typing.Any: + """ + Respect the metadata annotations on a field, such as aliasing. This function effectively + manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for + TypedDicts, which cannot support aliasing out of the box, and can be extended for additional + utilities, such as defaults. 
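+
+    A hedged sketch of the effect, using a hypothetical TypedDict (not defined in this module):
+
+        class UserDict(typing_extensions.TypedDict):
+            field: typing_extensions.Annotated[str, FieldMetadata(alias="field_name")]
+
+        convert_and_respect_annotation_metadata(
+            object_={"field": "value"}, annotation=UserDict, direction="write"
+        )  # -> {"field_name": "value"}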
+ + Parameters + ---------- + object_ : typing.Any + + annotation : type + The type we're looking to apply typing annotations from + + inner_type : typing.Optional[type] + + Returns + ------- + typing.Any + """ + + if object_ is None: + return None + if inner_type is None: + inner_type = annotation + + clean_type = _remove_annotations(inner_type) + # Pydantic models + if ( + inspect.isclass(clean_type) + and issubclass(clean_type, pydantic.BaseModel) + and isinstance(object_, typing.Mapping) + ): + return _convert_mapping(object_, clean_type, direction) + # TypedDicts + if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping): + return _convert_mapping(object_, clean_type, direction) + + if ( + typing_extensions.get_origin(clean_type) == typing.Dict + or typing_extensions.get_origin(clean_type) == dict + or clean_type == typing.Dict + ) and isinstance(object_, typing.Dict): + key_type = typing_extensions.get_args(clean_type)[0] + value_type = typing_extensions.get_args(clean_type)[1] + + return { + key: convert_and_respect_annotation_metadata( + object_=value, + annotation=annotation, + inner_type=value_type, + direction=direction, + ) + for key, value in object_.items() + } + + # If you're iterating on a string, do not bother to coerce it to a sequence. + if not isinstance(object_, str): + if ( + typing_extensions.get_origin(clean_type) == typing.Set + or typing_extensions.get_origin(clean_type) == set + or clean_type == typing.Set + ) and isinstance(object_, typing.Set): + inner_type = typing_extensions.get_args(clean_type)[0] + return { + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + } + elif ( + ( + typing_extensions.get_origin(clean_type) == typing.List + or typing_extensions.get_origin(clean_type) == list + or clean_type == typing.List + ) + and isinstance(object_, typing.List) + ) or ( + ( + typing_extensions.get_origin(clean_type) == typing.Sequence + or typing_extensions.get_origin(clean_type) == collections.abc.Sequence + or clean_type == typing.Sequence + ) + and isinstance(object_, typing.Sequence) + ): + inner_type = typing_extensions.get_args(clean_type)[0] + return [ + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + ] + + if typing_extensions.get_origin(clean_type) == typing.Union: + # We should be able to ~relatively~ safely try to convert keys against all + # member types in the union, the edge case here is if one member aliases a field + # of the same name to a different name from another member + # Or if another member aliases a field of the same name that another member does not. + for member in typing_extensions.get_args(clean_type): + object_ = convert_and_respect_annotation_metadata( + object_=object_, + annotation=annotation, + inner_type=member, + direction=direction, + ) + return object_ + + annotated_type = _get_annotation(annotation) + if annotated_type is None: + return object_ + + # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.) + # Then we can safely call it on the recursive conversion. 
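+    # (Plain scalars and values without Annotated metadata fall through unchanged.)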
+ return object_ + + +def _convert_mapping( + object_: typing.Mapping[str, object], + expected_type: typing.Any, + direction: typing.Literal["read", "write"], +) -> typing.Mapping[str, object]: + converted_object: typing.Dict[str, object] = {} + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + aliases_to_field_names = _get_alias_to_field_name(annotations) + for key, value in object_.items(): + if direction == "read" and key in aliases_to_field_names: + dealiased_key = aliases_to_field_names.get(key) + if dealiased_key is not None: + type_ = annotations.get(dealiased_key) + else: + type_ = annotations.get(key) + # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map + # + # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias + # then we can just pass the value through as is + if type_ is None: + converted_object[key] = value + elif direction == "read" and key not in aliases_to_field_names: + converted_object[key] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + else: + converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = ( + convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction) + ) + return converted_object + + +def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return None + + if maybe_annotated_type == typing_extensions.NotRequired: + type_ = typing_extensions.get_args(type_)[0] + maybe_annotated_type = typing_extensions.get_origin(type_) + + if maybe_annotated_type == typing_extensions.Annotated: + return type_ + + return None + + +def _remove_annotations(type_: typing.Any) -> typing.Any: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return type_ + + if maybe_annotated_type == typing_extensions.NotRequired: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + if maybe_annotated_type == typing_extensions.Annotated: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + return type_ + + +def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_alias_to_field_name(annotations) + + +def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_field_to_alias_name(annotations) + + +def _get_alias_to_field_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[maybe_alias] = field + return aliases + + +def _get_field_to_alias_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[field] = maybe_alias + return aliases + + +def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]: + maybe_annotated_type = _get_annotation(type_) + + if maybe_annotated_type is not None: + # The actual annotations are 1 onward, the first is the annotated type + annotations = 
typing_extensions.get_args(maybe_annotated_type)[1:] + + for annotation in annotations: + if isinstance(annotation, FieldMetadata) and annotation.alias is not None: + return annotation.alias + return None + + +def _alias_key( + key: str, + type_: typing.Any, + direction: typing.Literal["read", "write"], + aliases_to_field_names: typing.Dict[str, str], +) -> str: + if direction == "read": + return aliases_to_field_names.get(key, key) + return _get_alias_from_type(type_=type_) or key diff --git a/skyflow/generated/rest/environment.py b/skyflow/generated/rest/environment.py new file mode 100644 index 00000000..8c4747ca --- /dev/null +++ b/skyflow/generated/rest/environment.py @@ -0,0 +1,8 @@ +# This file was auto-generated by Fern from our API Definition. + +import enum + + +class SkyflowEnvironment(enum.Enum): + PRODUCTION = "https://identifier.vault.skyflowapis.com" + SANDBOX = "https://identifier.vault.skyflowapis-preview.com" diff --git a/skyflow/generated/rest/errors/__init__.py b/skyflow/generated/rest/errors/__init__.py new file mode 100644 index 00000000..64f898f5 --- /dev/null +++ b/skyflow/generated/rest/errors/__init__.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +from .bad_request_error import BadRequestError +from .not_found_error import NotFoundError +from .unauthorized_error import UnauthorizedError + +__all__ = ["BadRequestError", "NotFoundError", "UnauthorizedError"] diff --git a/skyflow/generated/rest/errors/bad_request_error.py b/skyflow/generated/rest/errors/bad_request_error.py new file mode 100644 index 00000000..2f3dba61 --- /dev/null +++ b/skyflow/generated/rest/errors/bad_request_error.py @@ -0,0 +1,9 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.api_error import ApiError +import typing + + +class BadRequestError(ApiError): + def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): + super().__init__(status_code=400, body=body) diff --git a/skyflow/generated/rest/errors/not_found_error.py b/skyflow/generated/rest/errors/not_found_error.py new file mode 100644 index 00000000..b557be0a --- /dev/null +++ b/skyflow/generated/rest/errors/not_found_error.py @@ -0,0 +1,9 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.api_error import ApiError +import typing + + +class NotFoundError(ApiError): + def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): + super().__init__(status_code=404, body=body) diff --git a/skyflow/generated/rest/errors/unauthorized_error.py b/skyflow/generated/rest/errors/unauthorized_error.py new file mode 100644 index 00000000..6d01cc9f --- /dev/null +++ b/skyflow/generated/rest/errors/unauthorized_error.py @@ -0,0 +1,9 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.api_error import ApiError +import typing + + +class UnauthorizedError(ApiError): + def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): + super().__init__(status_code=401, body=body) diff --git a/skyflow/generated/rest/exceptions.py b/skyflow/generated/rest/exceptions.py deleted file mode 100644 index ef323e2e..00000000 --- a/skyflow/generated/rest/exceptions.py +++ /dev/null @@ -1,200 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header        | Value                                    | Example
  Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - -from typing import Any, Optional -from typing_extensions import Self - -class OpenApiException(Exception): - """The base exception class for all OpenAPIExceptions""" - - -class ApiTypeError(OpenApiException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, - key_type=None) -> None: - """ Raises an exception for TypeErrors - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list): a list of keys an indices to get to the - current_item - None if unset - valid_classes (tuple): the primitive classes that current item - should be an instance of - None if unset - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - None if unset - """ - self.path_to_item = path_to_item - self.valid_classes = valid_classes - self.key_type = key_type - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiTypeError, self).__init__(full_msg) - - -class ApiValueError(OpenApiException, ValueError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list) the path to the exception in the - received_data dict. None if unset - """ - - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiValueError, self).__init__(full_msg) - - -class ApiAttributeError(OpenApiException, AttributeError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Raised when an attribute reference or assignment fails. 
- - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiAttributeError, self).__init__(full_msg) - - -class ApiKeyError(OpenApiException, KeyError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(ApiKeyError, self).__init__(full_msg) - - -class ApiException(OpenApiException): - - def __init__( - self, - status=None, - reason=None, - http_resp=None, - *, - body: Optional[str] = None, - data: Optional[Any] = None, - ) -> None: - self.status = status - self.reason = reason - self.body = body - self.data = data - self.headers = None - - if http_resp: - if self.status is None: - self.status = http_resp.status - if self.reason is None: - self.reason = http_resp.reason - if self.body is None: - try: - self.body = http_resp.data.decode('utf-8') - except Exception: - pass - self.headers = http_resp.getheaders() - - @classmethod - def from_response( - cls, - *, - http_resp, - body: Optional[str], - data: Optional[Any], - ) -> Self: - if http_resp.status == 400: - raise BadRequestException(http_resp=http_resp, body=body, data=data) - - if http_resp.status == 401: - raise UnauthorizedException(http_resp=http_resp, body=body, data=data) - - if http_resp.status == 403: - raise ForbiddenException(http_resp=http_resp, body=body, data=data) - - if http_resp.status == 404: - raise NotFoundException(http_resp=http_resp, body=body, data=data) - - if 500 <= http_resp.status <= 599: - raise ServiceException(http_resp=http_resp, body=body, data=data) - raise ApiException(http_resp=http_resp, body=body, data=data) - - def __str__(self): - """Custom error messages for exception""" - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) - if self.headers: - error_message += "HTTP response headers: {0}\n".format( - self.headers) - - if self.data or self.body: - error_message += "HTTP response body: {0}\n".format(self.data or self.body) - - return error_message - - -class BadRequestException(ApiException): - pass - - -class NotFoundException(ApiException): - pass - - -class UnauthorizedException(ApiException): - pass - - -class ForbiddenException(ApiException): - pass - - -class ServiceException(ApiException): - pass - - -def render_path(path_to_item): - """Returns a string representation of a path""" - result = "" - for pth in path_to_item: - if isinstance(pth, int): - result += "[{0}]".format(pth) - else: - result += "['{0}']".format(pth) - return result diff --git a/skyflow/generated/rest/models/__init__.py b/skyflow/generated/rest/models/__init__.py deleted file mode 100644 index 379cf733..00000000 --- a/skyflow/generated/rest/models/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding: utf-8 - -# flake8: noqa -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header        | Value                                    | Example
  Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -# import models into model package -from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType -from skyflow.generated.rest.models.audit_event_context import AuditEventContext -from skyflow.generated.rest.models.audit_event_data import AuditEventData -from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo -from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod -from skyflow.generated.rest.models.context_access_type import ContextAccessType -from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode -from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType -from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus -from skyflow.generated.rest.models.protobuf_any import ProtobufAny -from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody -from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody -from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody -from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody -from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody -from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION -from skyflow.generated.rest.models.request_action_type import RequestActionType -from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions -from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse -from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse -from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent -from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest -from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest -from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse -from skyflow.generated.rest.models.v1_byot import V1BYOT -from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse -from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord -from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse -from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse -from skyflow.generated.rest.models.v1_card import V1Card -from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse -from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse -from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload -from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest -from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse -from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from 
skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus -from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse -from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse -from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse -from skyflow.generated.rest.models.v1_member_type import V1MemberType -from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties -from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload -from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest -from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse -from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse -from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse -from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping -from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig - -from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest -from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse \ No newline at end of file diff --git a/skyflow/generated/rest/models/audit_event_audit_resource_type.py b/skyflow/generated/rest/models/audit_event_audit_resource_type.py deleted file mode 100644 index c425dce7..00000000 --- a/skyflow/generated/rest/models/audit_event_audit_resource_type.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header        | Value                                    | Example
  Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class AuditEventAuditResourceType(str, Enum): - """ - Type of the resource. - """ - - """ - allowed enum values - """ - NONE_API = 'NONE_API' - ACCOUNT = 'ACCOUNT' - AUDIT = 'AUDIT' - BASE_DATA_TYPE = 'BASE_DATA_TYPE' - FIELD_TEMPLATE = 'FIELD_TEMPLATE' - FILE = 'FILE' - KEY = 'KEY' - POLICY = 'POLICY' - PROTO_PARSE = 'PROTO_PARSE' - RECORD = 'RECORD' - ROLE = 'ROLE' - RULE = 'RULE' - SECRET = 'SECRET' - SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' - TOKEN = 'TOKEN' - USER = 'USER' - VAULT = 'VAULT' - VAULT_TEMPLATE = 'VAULT_TEMPLATE' - WORKSPACE = 'WORKSPACE' - TABLE = 'TABLE' - POLICY_TEMPLATE = 'POLICY_TEMPLATE' - MEMBER = 'MEMBER' - TAG = 'TAG' - CONNECTION = 'CONNECTION' - MIGRATION = 'MIGRATION' - SCHEDULED_JOB = 'SCHEDULED_JOB' - JOB = 'JOB' - COLUMN_NAME = 'COLUMN_NAME' - NETWORK_TOKEN = 'NETWORK_TOKEN' - SUBSCRIPTION = 'SUBSCRIPTION' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of AuditEventAuditResourceType from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/audit_event_context.py b/skyflow/generated/rest/models/audit_event_context.py deleted file mode 100644 index af280eb0..00000000 --- a/skyflow/generated/rest/models/audit_event_context.py +++ /dev/null @@ -1,113 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  Header        | Value                                    | Example
  Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.context_access_type import ContextAccessType -from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode -from skyflow.generated.rest.models.v1_member_type import V1MemberType -from typing import Optional, Set -from typing_extensions import Self - -class AuditEventContext(BaseModel): - """ - Context for an audit event. - """ # noqa: E501 - change_id: Optional[StrictStr] = Field(default=None, description="ID for the audit event.", alias="changeID") - request_id: Optional[StrictStr] = Field(default=None, description="ID for the request that caused the event.", alias="requestID") - trace_id: Optional[StrictStr] = Field(default=None, description="ID for the request set by the service that received the request.", alias="traceID") - session_id: Optional[StrictStr] = Field(default=None, description="ID for the session in which the request was sent.", alias="sessionID") - actor: Optional[StrictStr] = Field(default=None, description="Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID.") - actor_type: Optional[V1MemberType] = Field(default=V1MemberType.NONE, alias="actorType") - access_type: Optional[ContextAccessType] = Field(default=ContextAccessType.ACCESS_NONE, alias="accessType") - ip_address: Optional[StrictStr] = Field(default=None, description="IP Address of the client that made the request.", alias="ipAddress") - origin: Optional[StrictStr] = Field(default=None, description="HTTP Origin request header (including scheme, hostname, and port) of the request.") - auth_mode: Optional[ContextAuthMode] = Field(default=ContextAuthMode.AUTH_NONE, alias="authMode") - jwt_id: Optional[StrictStr] = Field(default=None, description="ID of the JWT token.", alias="jwtID") - bearer_token_context_id: Optional[StrictStr] = Field(default=None, description="Embedded User Context.", alias="bearerTokenContextID") - __properties: ClassVar[List[str]] = ["changeID", "requestID", "traceID", "sessionID", "actor", "actorType", "accessType", "ipAddress", "origin", "authMode", "jwtID", "bearerTokenContextID"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AuditEventContext from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AuditEventContext from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "changeID": obj.get("changeID"), - "requestID": obj.get("requestID"), - "traceID": obj.get("traceID"), - "sessionID": obj.get("sessionID"), - "actor": obj.get("actor"), - "actorType": obj.get("actorType") if obj.get("actorType") is not None else V1MemberType.NONE, - "accessType": obj.get("accessType") if obj.get("accessType") is not None else ContextAccessType.ACCESS_NONE, - "ipAddress": obj.get("ipAddress"), - "origin": obj.get("origin"), - "authMode": obj.get("authMode") if obj.get("authMode") is not None else ContextAuthMode.AUTH_NONE, - "jwtID": obj.get("jwtID"), - "bearerTokenContextID": obj.get("bearerTokenContextID") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/audit_event_data.py b/skyflow/generated/rest/models/audit_event_data.py deleted file mode 100644 index 5a463f00..00000000 --- a/skyflow/generated/rest/models/audit_event_data.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
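For illustration only (not part of the generated description): a direct call to the Data API with this header might look like the sketch below, assuming the requests package and placeholder values for the vault URL, vault ID, table name, and bearer token.

import requests

# Illustrative sketch only; the URL, vault ID, table name, and token are placeholders.
vault_url = "https://<identifier>.vault.skyflowapis.com"
headers = {"Authorization": "Bearer <bearer_token>"}
response = requests.get(f"{vault_url}/v1/vaults/<vault_id>/<table_name>", headers=headers)
print(response.status_code)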
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class AuditEventData(BaseModel): - """ - Any Sensitive data that needs to be wrapped. - """ # noqa: E501 - content: Optional[StrictStr] = Field(default=None, description="The entire body of the data requested or the query fired.") - __properties: ClassVar[List[str]] = ["content"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AuditEventData from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AuditEventData from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "content": obj.get("content") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/audit_event_http_info.py b/skyflow/generated/rest/models/audit_event_http_info.py deleted file mode 100644 index b3b2f074..00000000 --- a/skyflow/generated/rest/models/audit_event_http_info.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class AuditEventHTTPInfo(BaseModel): - """ - AuditEventHTTPInfo - """ # noqa: E501 - uri: Optional[StrictStr] = Field(default=None, description="The http URI that is used.", alias="URI") - method: Optional[StrictStr] = Field(default=None, description="http method used.") - __properties: ClassVar[List[str]] = ["URI", "method"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AuditEventHTTPInfo from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AuditEventHTTPInfo from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "URI": obj.get("URI"), - "method": obj.get("method") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/batch_record_method.py b/skyflow/generated/rest/models/batch_record_method.py deleted file mode 100644 index a2892049..00000000 --- a/skyflow/generated/rest/models/batch_record_method.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class BatchRecordMethod(str, Enum): - """ - Method of the operation. - """ - - """ - allowed enum values - """ - NONE = 'NONE' - POST = 'POST' - PUT = 'PUT' - GET = 'GET' - DELETE = 'DELETE' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of BatchRecordMethod from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/context_access_type.py b/skyflow/generated/rest/models/context_access_type.py deleted file mode 100644 index e00a9df9..00000000 --- a/skyflow/generated/rest/models/context_access_type.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class ContextAccessType(str, Enum): - """ - Type of access for the request. - """ - - """ - allowed enum values - """ - ACCESS_NONE = 'ACCESS_NONE' - API = 'API' - SQL = 'SQL' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of ContextAccessType from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/context_auth_mode.py b/skyflow/generated/rest/models/context_auth_mode.py deleted file mode 100644 index fb803e7a..00000000 --- a/skyflow/generated/rest/models/context_auth_mode.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class ContextAuthMode(str, Enum): - """ - Authentication mode the `actor` used. - """ - - """ - allowed enum values - """ - AUTH_NONE = 'AUTH_NONE' - OKTA_JWT = 'OKTA_JWT' - SERVICE_ACCOUNT_JWT = 'SERVICE_ACCOUNT_JWT' - PAT_JWT = 'PAT_JWT' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of ContextAuthMode from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/detokenize_record_response_value_type.py b/skyflow/generated/rest/models/detokenize_record_response_value_type.py deleted file mode 100644 index 62460141..00000000 --- a/skyflow/generated/rest/models/detokenize_record_response_value_type.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class DetokenizeRecordResponseValueType(str, Enum): - """ - DetokenizeRecordResponseValueType - """ - - """ - allowed enum values - """ - NONE = 'NONE' - STRING = 'STRING' - INTEGER = 'INTEGER' - FLOAT = 'FLOAT' - BOOL = 'BOOL' - DATETIME = 'DATETIME' - JSON = 'JSON' - ARRAY = 'ARRAY' - DATE = 'DATE' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of DetokenizeRecordResponseValueType from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/googlerpc_status.py b/skyflow/generated/rest/models/googlerpc_status.py deleted file mode 100644 index b9914c58..00000000 --- a/skyflow/generated/rest/models/googlerpc_status.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.protobuf_any import ProtobufAny -from typing import Optional, Set -from typing_extensions import Self - -class GooglerpcStatus(BaseModel): - """ - GooglerpcStatus - """ # noqa: E501 - code: Optional[StrictInt] = None - message: Optional[StrictStr] = None - details: Optional[List[ProtobufAny]] = None - __properties: ClassVar[List[str]] = ["code", "message", "details"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of GooglerpcStatus from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in details (list) - _items = [] - if self.details: - for _item_details in self.details: - if _item_details: - _items.append(_item_details.to_dict()) - _dict['details'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of GooglerpcStatus from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "code": obj.get("code"), - "message": obj.get("message"), - "details": [ProtobufAny.from_dict(_item) for _item in obj["details"]] if obj.get("details") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/protobuf_any.py b/skyflow/generated/rest/models/protobuf_any.py deleted file mode 100644 index e29a6356..00000000 --- a/skyflow/generated/rest/models/protobuf_any.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class ProtobufAny(BaseModel): - """ - ProtobufAny - """ # noqa: E501 - type: Optional[StrictStr] = Field(default=None, alias="@type") - additional_properties: Dict[str, Any] = {} - __properties: ClassVar[List[str]] = ["@type"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of ProtobufAny from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - * Fields in `self.additional_properties` are added to the output dict. - """ - excluded_fields: Set[str] = set([ - "additional_properties", - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # puts key-value pairs in additional_properties in the top level - if self.additional_properties is not None: - for _key, _value in self.additional_properties.items(): - _dict[_key] = _value - - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of ProtobufAny from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "@type": obj.get("@type") - }) - # store additional fields in additional_properties - for _key in obj.keys(): - if _key not in cls.__properties: - _obj.additional_properties[_key] = obj.get(_key) - - return _obj - - diff --git a/skyflow/generated/rest/models/query_service_execute_query_body.py b/skyflow/generated/rest/models/query_service_execute_query_body.py deleted file mode 100644 index fa6a9bf9..00000000 --- a/skyflow/generated/rest/models/query_service_execute_query_body.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class QueryServiceExecuteQueryBody(BaseModel): - """ - QueryServiceExecuteQueryBody - """ # noqa: E501 - query: Optional[StrictStr] = Field(default=None, description="The SQL query to execute.

Supported commands:
  • SELECT
Supported operators:
  • >
  • <
  • =
  • AND
  • OR
  • NOT
  • LIKE
  • ILIKE
  • NULL
  • NOT NULL
Supported keywords:
  • FROM
  • JOIN
  • INNER JOIN
  • LEFT OUTER JOIN
  • LEFT JOIN
  • RIGHT OUTER JOIN
  • RIGHT JOIN
  • FULL OUTER JOIN
  • FULL JOIN
  • OFFSET
  • LIMIT
  • WHERE
Supported functions:
  • AVG()
  • SUM()
  • COUNT()
  • MIN()
  • MAX()
  • REDACTION()
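Example (illustrative only; the table and column names below are hypothetical, but the syntax uses only the supported keywords listed above):
  • SELECT * FROM credit_cards WHERE state = 'CA' LIMIT 10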
") - __properties: ClassVar[List[str]] = ["query"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of QueryServiceExecuteQueryBody from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of QueryServiceExecuteQueryBody from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "query": obj.get("query") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/record_service_batch_operation_body.py b/skyflow/generated/rest/models/record_service_batch_operation_body.py deleted file mode 100644 index fe6ef37e..00000000 --- a/skyflow/generated/rest/models/record_service_batch_operation_body.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord -from skyflow.generated.rest.models.v1_byot import V1BYOT -from typing import Optional, Set -from typing_extensions import Self - -class RecordServiceBatchOperationBody(BaseModel): - """ - RecordServiceBatchOperationBody - """ # noqa: E501 - records: Optional[List[V1BatchRecord]] = Field(default=None, description="Record operations to perform.") - continue_on_error: Optional[StrictBool] = Field(default=None, description="Continue performing operations on partial errors.", alias="continueOnError") - byot: Optional[V1BYOT] = V1BYOT.DISABLE - __properties: ClassVar[List[str]] = ["records", "continueOnError", "byot"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of RecordServiceBatchOperationBody from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of RecordServiceBatchOperationBody from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1BatchRecord.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, - "continueOnError": obj.get("continueOnError"), - "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE - }) - return _obj - - diff --git a/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py b/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py deleted file mode 100644 index b12f79a8..00000000 --- a/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class RecordServiceBulkDeleteRecordBody(BaseModel): - """ - RecordServiceBulkDeleteRecordBody - """ # noqa: E501 - skyflow_ids: Optional[List[StrictStr]] = Field(default=None, description="`skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table.") - __properties: ClassVar[List[str]] = ["skyflow_ids"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of RecordServiceBulkDeleteRecordBody from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of RecordServiceBulkDeleteRecordBody from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "skyflow_ids": obj.get("skyflow_ids") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/record_service_insert_record_body.py b/skyflow/generated/rest/models/record_service_insert_record_body.py deleted file mode 100644 index c067fe25..00000000 --- a/skyflow/generated/rest/models/record_service_insert_record_body.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_byot import V1BYOT -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from typing import Optional, Set -from typing_extensions import Self - -class RecordServiceInsertRecordBody(BaseModel): - """ - RecordServiceInsertRecordBody - """ # noqa: E501 - records: Optional[List[V1FieldRecords]] = Field(default=None, description="Record values and tokens.") - tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") - upsert: Optional[StrictStr] = Field(default=None, description="Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.

When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.") - homogeneous: Optional[StrictBool] = Field(default=False, description="If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert.") - byot: Optional[V1BYOT] = V1BYOT.DISABLE - __properties: ClassVar[List[str]] = ["records", "tokenization", "upsert", "homogeneous", "byot"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of RecordServiceInsertRecordBody from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of RecordServiceInsertRecordBody from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, - "tokenization": obj.get("tokenization"), - "upsert": obj.get("upsert"), - "homogeneous": obj.get("homogeneous") if obj.get("homogeneous") is not None else False, - "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE - }) - return _obj - - diff --git a/skyflow/generated/rest/models/record_service_update_record_body.py b/skyflow/generated/rest/models/record_service_update_record_body.py deleted file mode 100644 index 627a2f6e..00000000 --- a/skyflow/generated/rest/models/record_service_update_record_body.py +++ /dev/null @@ -1,97 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_byot import V1BYOT -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from typing import Optional, Set -from typing_extensions import Self - -class RecordServiceUpdateRecordBody(BaseModel): - """ - RecordServiceUpdateRecordBody - """ # noqa: E501 - record: Optional[V1FieldRecords] = None - tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") - byot: Optional[V1BYOT] = V1BYOT.DISABLE - __properties: ClassVar[List[str]] = ["record", "tokenization", "byot"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of RecordServiceUpdateRecordBody from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of record - if self.record: - _dict['record'] = self.record.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of RecordServiceUpdateRecordBody from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "record": V1FieldRecords.from_dict(obj["record"]) if obj.get("record") is not None else None, - "tokenization": obj.get("tokenization"), - "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE - }) - return _obj - - diff --git a/skyflow/generated/rest/models/redaction_enum_redaction.py b/skyflow/generated/rest/models/redaction_enum_redaction.py deleted file mode 100644 index 82f1a16e..00000000 --- a/skyflow/generated/rest/models/redaction_enum_redaction.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class RedactionEnumREDACTION(str, Enum): - """ - Redaction type. Subject to policies assigned to the API caller. When used for detokenization, only supported for vaults that support [column groups](/tokenization-column-groups/). - """ - - """ - allowed enum values - """ - DEFAULT = 'DEFAULT' - REDACTED = 'REDACTED' - MASKED = 'MASKED' - PLAIN_TEXT = 'PLAIN_TEXT' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of RedactionEnumREDACTION from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/request_action_type.py b/skyflow/generated/rest/models/request_action_type.py deleted file mode 100644 index 2137d2eb..00000000 --- a/skyflow/generated/rest/models/request_action_type.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class RequestActionType(str, Enum): - """ - RequestActionType - """ - - """ - allowed enum values - """ - NONE = 'NONE' - ASSIGN = 'ASSIGN' - CREATE = 'CREATE' - DELETE = 'DELETE' - EXECUTE = 'EXECUTE' - LIST = 'LIST' - READ = 'READ' - UNASSIGN = 'UNASSIGN' - UPDATE = 'UPDATE' - VALIDATE = 'VALIDATE' - LOGIN = 'LOGIN' - ROTATE = 'ROTATE' - SCHEDULEROTATION = 'SCHEDULEROTATION' - SCHEDULEROTATIONALERT = 'SCHEDULEROTATIONALERT' - IMPORT = 'IMPORT' - GETIMPORTPARAMETERS = 'GETIMPORTPARAMETERS' - PING = 'PING' - GETCLOUDPROVIDER = 'GETCLOUDPROVIDER' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of RequestActionType from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/v1_audit_after_options.py b/skyflow/generated/rest/models/v1_audit_after_options.py deleted file mode 100644 index f8c441ef..00000000 --- a/skyflow/generated/rest/models/v1_audit_after_options.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1AuditAfterOptions(BaseModel): - """ - V1AuditAfterOptions - """ # noqa: E501 - timestamp: Optional[StrictStr] = Field(default=None, description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.") - change_id: Optional[StrictStr] = Field(default=None, description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.", alias="changeID") - __properties: ClassVar[List[str]] = ["timestamp", "changeID"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1AuditAfterOptions from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1AuditAfterOptions from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "timestamp": obj.get("timestamp"), - "changeID": obj.get("changeID") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_audit_event_response.py b/skyflow/generated/rest/models/v1_audit_event_response.py deleted file mode 100644 index bb78dfc8..00000000 --- a/skyflow/generated/rest/models/v1_audit_event_response.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.audit_event_data import AuditEventData -from typing import Optional, Set -from typing_extensions import Self - -class V1AuditEventResponse(BaseModel): - """ - Contains fields for defining Response Properties. - """ # noqa: E501 - code: Optional[StrictInt] = Field(default=None, description="The status of the overall operation.") - message: Optional[StrictStr] = Field(default=None, description="The status message for the overall operation.") - data: Optional[AuditEventData] = None - timestamp: Optional[StrictStr] = Field(default=None, description="time when this response is generated, use extention method to set it.") - __properties: ClassVar[List[str]] = ["code", "message", "data", "timestamp"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1AuditEventResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of data - if self.data: - _dict['data'] = self.data.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1AuditEventResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "code": obj.get("code"), - "message": obj.get("message"), - "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, - "timestamp": obj.get("timestamp") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_audit_response.py b/skyflow/generated/rest/models/v1_audit_response.py deleted file mode 100644 index 06a3d0df..00000000 --- a/skyflow/generated/rest/models/v1_audit_response.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions -from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent -from typing import Optional, Set -from typing_extensions import Self - -class V1AuditResponse(BaseModel): - """ - V1AuditResponse - """ # noqa: E501 - event: Optional[List[V1AuditResponseEvent]] = Field(default=None, description="Events matching the query.") - next_ops: Optional[V1AuditAfterOptions] = Field(default=None, alias="nextOps") - __properties: ClassVar[List[str]] = ["event", "nextOps"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1AuditResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in event (list) - _items = [] - if self.event: - for _item_event in self.event: - if _item_event: - _items.append(_item_event.to_dict()) - _dict['event'] = _items - # override the default output from pydantic by calling `to_dict()` of next_ops - if self.next_ops: - _dict['nextOps'] = self.next_ops.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1AuditResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "event": [V1AuditResponseEvent.from_dict(_item) for _item in obj["event"]] if obj.get("event") is not None else None, - "nextOps": V1AuditAfterOptions.from_dict(obj["nextOps"]) if obj.get("nextOps") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_audit_response_event.py b/skyflow/generated/rest/models/v1_audit_response_event.py deleted file mode 100644 index 0edd2a52..00000000 --- a/skyflow/generated/rest/models/v1_audit_response_event.py +++ /dev/null @@ -1,110 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. 
*identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.audit_event_context import AuditEventContext -from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse -from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest -from typing import Optional, Set -from typing_extensions import Self - -class V1AuditResponseEvent(BaseModel): - """ - Audit event details. - """ # noqa: E501 - context: Optional[AuditEventContext] = None - request: Optional[V1AuditResponseEventRequest] = None - response: Optional[V1AuditEventResponse] = None - parent_account_id: Optional[StrictStr] = Field(default=None, description="Parent account ID of the account that made the request, if any.", alias="parentAccountID") - account_id: Optional[StrictStr] = Field(default=None, description="ID of the account that made the request.", alias="accountID") - resource_ids: Optional[List[StrictStr]] = Field(default=None, description="IDs for resources involved in the event. Presented in `{resourceType}/{resourceID}` format. For example, `VAULT/cd1d815aa09b4cbfbb803bd20349f202`.", alias="resourceIDs") - __properties: ClassVar[List[str]] = ["context", "request", "response", "parentAccountID", "accountID", "resourceIDs"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1AuditResponseEvent from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of context - if self.context: - _dict['context'] = self.context.to_dict() - # override the default output from pydantic by calling `to_dict()` of request - if self.request: - _dict['request'] = self.request.to_dict() - # override the default output from pydantic by calling `to_dict()` of response - if self.response: - _dict['response'] = self.response.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1AuditResponseEvent from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "context": AuditEventContext.from_dict(obj["context"]) if obj.get("context") is not None else None, - "request": V1AuditResponseEventRequest.from_dict(obj["request"]) if obj.get("request") is not None else None, - "response": V1AuditEventResponse.from_dict(obj["response"]) if obj.get("response") is not None else None, - "parentAccountID": obj.get("parentAccountID"), - "accountID": obj.get("accountID"), - "resourceIDs": obj.get("resourceIDs") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_audit_response_event_request.py b/skyflow/generated/rest/models/v1_audit_response_event_request.py deleted file mode 100644 index 2b4c6546..00000000 --- a/skyflow/generated/rest/models/v1_audit_response_event_request.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
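Every generated module in this diff repeats the same calling convention in its docstring: a vault-specific base URI plus a Bearer token in the `Authorization` header. A minimal sketch of that convention, independent of the SDK; the use of `requests`, the vault identifier, the token value, and the endpoint path are all placeholders or assumptions, not taken from this patch:

```python
import requests  # assumption: any HTTP client works; the SDK ships its own ApiClient

VAULT_IDENTIFIER = "<identifier>"   # placeholder: the identifier from your vault's URL
BEARER_TOKEN = "<bearer-token>"     # placeholder: obtained via service-account authentication

# Production base URI per the docstring; swap in *.vault.skyflowapis-preview.com for sandbox.
base_url = f"https://{VAULT_IDENTIFIER}.vault.skyflowapis.com"

response = requests.get(
    f"{base_url}/v1/vaults",  # hypothetical path, shown only to illustrate the header
    headers={"Authorization": f"Bearer {BEARER_TOKEN}"},
)
print(response.status_code)
```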
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType -from skyflow.generated.rest.models.audit_event_data import AuditEventData -from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo -from skyflow.generated.rest.models.request_action_type import RequestActionType -from typing import Optional, Set -from typing_extensions import Self - -class V1AuditResponseEventRequest(BaseModel): - """ - Contains fields for defining Request Properties. - """ # noqa: E501 - data: Optional[AuditEventData] = None - api_name: Optional[StrictStr] = Field(default=None, description="API name.", alias="apiName") - workspace_id: Optional[StrictStr] = Field(default=None, description="The workspaceID (if any) of the request.", alias="workspaceID") - vault_id: Optional[StrictStr] = Field(default=None, description="The vaultID (if any) of the request.", alias="vaultID") - tags: Optional[List[StrictStr]] = Field(default=None, description="Tags associated with the event. To provide better search capabilities. Like login.") - timestamp: Optional[StrictStr] = Field(default=None, description="time when this request is generated, use extention method to set it.") - action_type: Optional[RequestActionType] = Field(default=RequestActionType.NONE, alias="actionType") - resource_type: Optional[AuditEventAuditResourceType] = Field(default=AuditEventAuditResourceType.NONE_API, alias="resourceType") - http_info: Optional[AuditEventHTTPInfo] = Field(default=None, alias="httpInfo") - __properties: ClassVar[List[str]] = ["data", "apiName", "workspaceID", "vaultID", "tags", "timestamp", "actionType", "resourceType", "httpInfo"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1AuditResponseEventRequest from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of data - if self.data: - _dict['data'] = self.data.to_dict() - # override the default output from pydantic by calling `to_dict()` of http_info - if self.http_info: - _dict['httpInfo'] = self.http_info.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1AuditResponseEventRequest from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, - "apiName": obj.get("apiName"), - "workspaceID": obj.get("workspaceID"), - "vaultID": obj.get("vaultID"), - "tags": obj.get("tags"), - "timestamp": obj.get("timestamp"), - "actionType": obj.get("actionType") if obj.get("actionType") is not None else RequestActionType.NONE, - "resourceType": obj.get("resourceType") if obj.get("resourceType") is not None else AuditEventAuditResourceType.NONE_API, - "httpInfo": AuditEventHTTPInfo.from_dict(obj["httpInfo"]) if obj.get("httpInfo") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_batch_operation_response.py b/skyflow/generated/rest/models/v1_batch_operation_response.py deleted file mode 100644 index b790403f..00000000 --- a/skyflow/generated/rest/models/v1_batch_operation_response.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
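The file removed below defined `V1BatchOperationResponse`. For reference, a minimal sketch of rehydrating such a payload with the generated helpers; the module path is as it appears in this diff and the payload values are purely illustrative:

```python
from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse

# Illustrative payload; each entry in `responses` mirrors its per-method API, as listed below.
payload = {
    "vaultID": "<vault-id>",
    "responses": [{"records": [{"skyflow_id": "<skyflow-id>"}]}],
}

batch_response = V1BatchOperationResponse.from_dict(payload)
print(batch_response.vault_id)   # snake_case attribute backed by the `vaultID` alias
print(batch_response.to_json())  # round-trips back to the aliased JSON shape
```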
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1BatchOperationResponse(BaseModel): - """ - V1BatchOperationResponse - """ # noqa: E501 - vault_id: Optional[StrictStr] = Field(default=None, description="ID of the vault.", alias="vaultID") - responses: Optional[List[Dict[str, Any]]] = Field(default=None, description="Responses in the same order as in the request. Responses have the same payload structure as their corresponding APIs:
  • `POST` returns an Insert Records response.
  • `PUT` returns an Update Record response.
  • `GET` returns a Get Record response.
  • `DELETE` returns a Delete Record response.
") - __properties: ClassVar[List[str]] = ["vaultID", "responses"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BatchOperationResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BatchOperationResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "vaultID": obj.get("vaultID"), - "responses": obj.get("responses") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_batch_record.py b/skyflow/generated/rest/models/v1_batch_record.py deleted file mode 100644 index 76480a55..00000000 --- a/skyflow/generated/rest/models/v1_batch_record.py +++ /dev/null @@ -1,108 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
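The `V1BatchRecord` model removed below describes one operation inside a batch request. A construction sketch; the module paths match this diff, the values are placeholders, and the `POST` member of `BatchRecordMethod` is an assumption (only `NONE` is visible in this patch):

```python
from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod
from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord

record = V1BatchRecord(
    table_name="credit_cards",              # populated via the `tableName` alias
    method=BatchRecordMethod.POST,          # assumption: enum member not shown in this diff
    batch_id="batch_1",                     # operations sharing a batchID run sequentially
    fields={"card_number": "4111111111111111", "cardholder_name": "Jane Doe"},
)
print(record.to_dict())                     # emits aliased keys: tableName, batchID, ...
```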
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod -from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION -from typing import Optional, Set -from typing_extensions import Self - -class V1BatchRecord(BaseModel): - """ - V1BatchRecord - """ # noqa: E501 - fields: Optional[Dict[str, Any]] = Field(default=None, description="Field and value key pairs. For example, `{'field_1':'value_1', 'field_2':'value_2'}`. Only valid when `method` is `POST` or `PUT`.") - table_name: Optional[StrictStr] = Field(default=None, description="Name of the table to perform the operation on.", alias="tableName") - method: Optional[BatchRecordMethod] = BatchRecordMethod.NONE - batch_id: Optional[StrictStr] = Field(default=None, description="ID to group operations by. Operations in the same group are executed sequentially.", alias="batchID") - redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT - tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.") - id: Optional[StrictStr] = Field(default=None, description="`skyflow_id` for the record. Only valid when `method` is `GET`, `DELETE`, or `PUT`.", alias="ID") - download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") - upsert: Optional[StrictStr] = Field(default=None, description="Column that stores primary keys for upsert operations. The column must be marked as unique in the vault schema. Only valid when `method` is `POST`.") - tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") - __properties: ClassVar[List[str]] = ["fields", "tableName", "method", "batchID", "redaction", "tokenization", "ID", "downloadURL", "upsert", "tokens"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BatchRecord from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. 
Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BatchRecord from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "fields": obj.get("fields"), - "tableName": obj.get("tableName"), - "method": obj.get("method") if obj.get("method") is not None else BatchRecordMethod.NONE, - "batchID": obj.get("batchID"), - "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT, - "tokenization": obj.get("tokenization"), - "ID": obj.get("ID"), - "downloadURL": obj.get("downloadURL"), - "upsert": obj.get("upsert"), - "tokens": obj.get("tokens") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_bin_list_request.py b/skyflow/generated/rest/models/v1_bin_list_request.py deleted file mode 100644 index 71de651e..00000000 --- a/skyflow/generated/rest/models/v1_bin_list_request.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
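A quick sketch of building the BIN-lookup request defined below; the BIN value is a placeholder and the field names are drawn from the `V1Card` model elsewhere in this diff:

```python
from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest

bin_request = V1BINListRequest(
    bin="411111",                                        # populated via the `BIN` alias
    fields=["issuer_name", "card_scheme", "card_type"],  # omit to return all fields
)
print(bin_request.to_json())
```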
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig -from typing import Optional, Set -from typing_extensions import Self - -class V1BINListRequest(BaseModel): - """ - Request to return specific card metadata. - """ # noqa: E501 - fields: Optional[List[StrictStr]] = Field(default=None, description="Fields to return. If not specified, all fields are returned.") - bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") - vault_schema_config: Optional[V1VaultSchemaConfig] = None - skyflow_id: Optional[StrictStr] = Field(default=None, description="skyflow_id of the record.") - __properties: ClassVar[List[str]] = ["fields", "BIN", "vault_schema_config", "skyflow_id"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BINListRequest from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of vault_schema_config - if self.vault_schema_config: - _dict['vault_schema_config'] = self.vault_schema_config.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BINListRequest from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "fields": obj.get("fields"), - "BIN": obj.get("BIN"), - "vault_schema_config": V1VaultSchemaConfig.from_dict(obj["vault_schema_config"]) if obj.get("vault_schema_config") is not None else None, - "skyflow_id": obj.get("skyflow_id") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_bin_list_response.py b/skyflow/generated/rest/models/v1_bin_list_response.py deleted file mode 100644 index becf8bb4..00000000 --- a/skyflow/generated/rest/models/v1_bin_list_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
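And the matching response side: a sketch of parsing a Get BIN payload into the `V1BINListResponse` and `V1Card` models removed in this diff; the payload itself is illustrative:

```python
from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse

payload = {
    "cards_data": [
        {"BIN": "411111", "issuer_name": "Example Bank", "card_scheme": "VISA"}
    ]
}
bin_response = V1BINListResponse.from_dict(payload)
for card in bin_response.cards_data or []:
    print(card.issuer_name, card.card_scheme)
```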
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_card import V1Card -from typing import Optional, Set -from typing_extensions import Self - -class V1BINListResponse(BaseModel): - """ - Response to the Get BIN request. - """ # noqa: E501 - cards_data: Optional[List[V1Card]] = Field(default=None, description="Card metadata associated with the specified BIN.") - __properties: ClassVar[List[str]] = ["cards_data"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BINListResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in cards_data (list) - _items = [] - if self.cards_data: - for _item_cards_data in self.cards_data: - if _item_cards_data: - _items.append(_item_cards_data.to_dict()) - _dict['cards_data'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BINListResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "cards_data": [V1Card.from_dict(_item) for _item in obj["cards_data"]] if obj.get("cards_data") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_bulk_delete_record_response.py b/skyflow/generated/rest/models/v1_bulk_delete_record_response.py deleted file mode 100644 index 726e1c40..00000000 --- a/skyflow/generated/rest/models/v1_bulk_delete_record_response.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1BulkDeleteRecordResponse(BaseModel): - """ - V1BulkDeleteRecordResponse - """ # noqa: E501 - record_id_response: Optional[List[StrictStr]] = Field(default=None, description="IDs for the deleted records, or `*` if all records were deleted.", alias="RecordIDResponse") - __properties: ClassVar[List[str]] = ["RecordIDResponse"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BulkDeleteRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BulkDeleteRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "RecordIDResponse": obj.get("RecordIDResponse") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_bulk_get_record_response.py b/skyflow/generated/rest/models/v1_bulk_get_record_response.py deleted file mode 100644 index df8095df..00000000 --- a/skyflow/generated/rest/models/v1_bulk_get_record_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
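For the bulk-get response defined below, a short parsing sketch; the payload is illustrative and each record follows the `V1FieldRecords` shape (`fields` plus optional `tokens`):

```python
from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse

payload = {
    "records": [
        {"fields": {"name": "Jane Doe"}, "tokens": {"name": "<name-token>"}},
    ]
}
bulk_response = V1BulkGetRecordResponse.from_dict(payload)
for rec in bulk_response.records or []:
    print(rec.fields, rec.tokens)
```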
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from typing import Optional, Set -from typing_extensions import Self - -class V1BulkGetRecordResponse(BaseModel): - """ - V1BulkGetRecordResponse - """ # noqa: E501 - records: Optional[List[V1FieldRecords]] = Field(default=None, description="The specified records.") - __properties: ClassVar[List[str]] = ["records"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1BulkGetRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1BulkGetRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_byot.py b/skyflow/generated/rest/models/v1_byot.py deleted file mode 100644 index 754a70dc..00000000 --- a/skyflow/generated/rest/models/v1_byot.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
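The `V1BYOT` enum below captures the three bring-your-own-token modes. A minimal usage sketch against the generated enum exactly as it appears in this diff:

```python
from skyflow.generated.rest.models.v1_byot import V1BYOT

mode = V1BYOT.ENABLE_STRICT           # tokens required for every field
print(mode.value)                     # 'ENABLE_STRICT'
print(V1BYOT.from_json('"ENABLE"'))   # parsed from a JSON string literal
```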
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class V1BYOT(str, Enum): - """ - Token insertion behavior. - DISABLE: Tokens aren't allowed for any fields. If tokens are specified, the request fails. - ENABLE: Tokens are allowed—but not required—for all fields. If tokens are specified, they're inserted. - ENABLE_STRICT: Tokens are required for all fields. If tokens are specified, they're inserted. If not, the request fails. - """ - - """ - allowed enum values - """ - DISABLE = 'DISABLE' - ENABLE = 'ENABLE' - ENABLE_STRICT = 'ENABLE_STRICT' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of V1BYOT from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/v1_card.py b/skyflow/generated/rest/models/v1_card.py deleted file mode 100644 index 2245ee74..00000000 --- a/skyflow/generated/rest/models/v1_card.py +++ /dev/null @@ -1,104 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1Card(BaseModel): - """ - Card metadata of the requested BIN. - """ # noqa: E501 - bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") - issuer_name: Optional[StrictStr] = Field(default=None, description="Name of the card issuer bank.") - country_code: Optional[StrictStr] = Field(default=None, description="Country code of the card.") - currency: Optional[StrictStr] = Field(default=None, description="Currency of the card.") - card_type: Optional[StrictStr] = Field(default=None, description="Type of the card.") - card_category: Optional[StrictStr] = Field(default=None, description="Category of the card.") - card_scheme: Optional[StrictStr] = Field(default=None, description="Scheme of the card.") - card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Last four digits of the card number.") - card_expiry: Optional[StrictStr] = Field(default=None, description="Expiry date of the card.") - __properties: ClassVar[List[str]] = ["BIN", "issuer_name", "country_code", "currency", "card_type", "card_category", "card_scheme", "card_last_four_digits", "card_expiry"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1Card from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1Card from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "BIN": obj.get("BIN"), - "issuer_name": obj.get("issuer_name"), - "country_code": obj.get("country_code"), - "currency": obj.get("currency"), - "card_type": obj.get("card_type"), - "card_category": obj.get("card_category"), - "card_scheme": obj.get("card_scheme"), - "card_last_four_digits": obj.get("card_last_four_digits"), - "card_expiry": obj.get("card_expiry") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_delete_file_response.py b/skyflow/generated/rest/models/v1_delete_file_response.py deleted file mode 100644 index e68030c0..00000000 --- a/skyflow/generated/rest/models/v1_delete_file_response.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1DeleteFileResponse(BaseModel): - """ - V1DeleteFileResponse - """ # noqa: E501 - skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the record.") - deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the file was deleted.") - __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DeleteFileResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DeleteFileResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "skyflow_id": obj.get("skyflow_id"), - "deleted": obj.get("deleted") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_delete_record_response.py b/skyflow/generated/rest/models/v1_delete_record_response.py deleted file mode 100644 index a56d3ba2..00000000 --- a/skyflow/generated/rest/models/v1_delete_record_response.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1DeleteRecordResponse(BaseModel): - """ - V1DeleteRecordResponse - """ # noqa: E501 - skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the deleted record.") - deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the record was deleted.") - __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DeleteRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DeleteRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "skyflow_id": obj.get("skyflow_id"), - "deleted": obj.get("deleted") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_detokenize_payload.py b/skyflow/generated/rest/models/v1_detokenize_payload.py deleted file mode 100644 index 0394aa1c..00000000 --- a/skyflow/generated/rest/models/v1_detokenize_payload.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
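The detokenize payload model below bundles per-token requests with request-level flags. A construction sketch using the generated models as they appear in this diff; token values are placeholders:

```python
from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION
from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload
from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest

payload = V1DetokenizePayload(
    detokenization_parameters=[
        V1DetokenizeRecordRequest(token="<token-1>"),  # redaction defaults to DEFAULT
        V1DetokenizeRecordRequest(token="<token-2>", redaction=RedactionEnumREDACTION.DEFAULT),
    ],
    continue_on_error=True,  # keep resolving the remaining tokens if one fails
)
print(payload.to_json())
```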
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictBool -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest -from typing import Optional, Set -from typing_extensions import Self - -class V1DetokenizePayload(BaseModel): - """ - V1DetokenizePayload - """ # noqa: E501 - detokenization_parameters: Optional[List[V1DetokenizeRecordRequest]] = Field(default=None, description="Detokenization details.", alias="detokenizationParameters") - download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") - continue_on_error: Optional[StrictBool] = Field(default=False, description="If `true`, the detokenization request continues even if an error occurs.", alias="continueOnError") - __properties: ClassVar[List[str]] = ["detokenizationParameters", "downloadURL", "continueOnError"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DetokenizePayload from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. 
- """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in detokenization_parameters (list) - _items = [] - if self.detokenization_parameters: - for _item_detokenization_parameters in self.detokenization_parameters: - if _item_detokenization_parameters: - _items.append(_item_detokenization_parameters.to_dict()) - _dict['detokenizationParameters'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DetokenizePayload from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "detokenizationParameters": [V1DetokenizeRecordRequest.from_dict(_item) for _item in obj["detokenizationParameters"]] if obj.get("detokenizationParameters") is not None else None, - "downloadURL": obj.get("downloadURL"), - "continueOnError": obj.get("continueOnError") if obj.get("continueOnError") is not None else False - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_detokenize_record_request.py b/skyflow/generated/rest/models/v1_detokenize_record_request.py deleted file mode 100644 index 2899501b..00000000 --- a/skyflow/generated/rest/models/v1_detokenize_record_request.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION -from typing import Optional, Set -from typing_extensions import Self - -class V1DetokenizeRecordRequest(BaseModel): - """ - V1DetokenizeRecordRequest - """ # noqa: E501 - token: Optional[StrictStr] = Field(default=None, description="Token that identifies the record to detokenize.") - redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT - __properties: ClassVar[List[str]] = ["token", "redaction"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DetokenizeRecordRequest from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DetokenizeRecordRequest from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "token": obj.get("token"), - "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_detokenize_record_response.py b/skyflow/generated/rest/models/v1_detokenize_record_response.py deleted file mode 100644 index 2da5d15d..00000000 --- a/skyflow/generated/rest/models/v1_detokenize_record_response.py +++ /dev/null @@ -1,95 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType -from typing import Optional, Set -from typing_extensions import Self - -class V1DetokenizeRecordResponse(BaseModel): - """ - V1DetokenizeRecordResponse - """ # noqa: E501 - token: Optional[StrictStr] = Field(default=None, description="Token of the record.") - value_type: Optional[DetokenizeRecordResponseValueType] = Field(default=DetokenizeRecordResponseValueType.NONE, alias="valueType") - value: Optional[StrictStr] = Field(default=None, description="Data corresponding to the token.") - error: Optional[StrictStr] = Field(default=None, description="Error if token isn't found.") - __properties: ClassVar[List[str]] = ["token", "valueType", "value", "error"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DetokenizeRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DetokenizeRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "token": obj.get("token"), - "valueType": obj.get("valueType") if obj.get("valueType") is not None else DetokenizeRecordResponseValueType.NONE, - "value": obj.get("value"), - "error": obj.get("error") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_detokenize_response.py b/skyflow/generated/rest/models/v1_detokenize_response.py deleted file mode 100644 index 34554aa0..00000000 --- a/skyflow/generated/rest/models/v1_detokenize_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
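On the response side, a sketch of loading a detokenize result into the models below; the payload is illustrative, and `valueType` is left to its default since that enum's members are not shown in this part of the diff:

```python
from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse

payload = {
    "records": [
        {"token": "<token-1>", "value": "jane@example.com"},
        {"token": "<token-2>", "error": "Token Not Found"},
    ]
}
detok = V1DetokenizeResponse.from_dict(payload)
for rec in detok.records or []:
    print(rec.token, rec.value or rec.error)
```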
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse -from typing import Optional, Set -from typing_extensions import Self - -class V1DetokenizeResponse(BaseModel): - """ - V1DetokenizeResponse - """ # noqa: E501 - records: Optional[List[V1DetokenizeRecordResponse]] = Field(default=None, description="Records corresponding to the specified tokens.") - __properties: ClassVar[List[str]] = ["records"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1DetokenizeResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1DetokenizeResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1DetokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_field_records.py b/skyflow/generated/rest/models/v1_field_records.py deleted file mode 100644 index 913fd6d0..00000000 --- a/skyflow/generated/rest/models/v1_field_records.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
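`V1FieldRecords` below is the record envelope reused by several responses. A tiny construction sketch; column names and token values are placeholders:

```python
from skyflow.generated.rest.models.v1_field_records import V1FieldRecords

record = V1FieldRecords(
    fields={"name": "Jane Doe", "email": "jane@example.com"},
    tokens={"name": "<name-token>", "email": "<email-token>"},
)
print(record.to_dict())
```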
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1FieldRecords(BaseModel): - """ - Record values and tokens. - """ # noqa: E501 - fields: Optional[Dict[str, Any]] = Field(default=None, description="Fields and values for the record. For example, `{'field_1':'value_1', 'field_2':'value_2'}`.") - tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") - __properties: ClassVar[List[str]] = ["fields", "tokens"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1FieldRecords from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1FieldRecords from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "fields": obj.get("fields"), - "tokens": obj.get("tokens") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_file_av_scan_status.py b/skyflow/generated/rest/models/v1_file_av_scan_status.py deleted file mode 100644 index 91479e32..00000000 --- a/skyflow/generated/rest/models/v1_file_av_scan_status.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class V1FileAVScanStatus(str, Enum): - """ - Anti-virus scan status of the file. - """ - - """ - allowed enum values - """ - SCAN_NONE = 'SCAN_NONE' - SCAN_CLEAN = 'SCAN_CLEAN' - SCAN_INFECTED = 'SCAN_INFECTED' - SCAN_DELETED = 'SCAN_DELETED' - SCAN_ERROR = 'SCAN_ERROR' - SCAN_PENDING = 'SCAN_PENDING' - SCAN_UNSCANNABLE = 'SCAN_UNSCANNABLE' - SCAN_FILE_NOT_FOUND = 'SCAN_FILE_NOT_FOUND' - SCAN_INVALID = 'SCAN_INVALID' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of V1FileAVScanStatus from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/v1_get_auth_token_request.py b/skyflow/generated/rest/models/v1_get_auth_token_request.py deleted file mode 100644 index fd5b201f..00000000 --- a/skyflow/generated/rest/models/v1_get_auth_token_request.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Management API - - # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
  • X-SKYFLOW-ACCOUNT-ID: Your Skyflow account ID. Example: X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1GetAuthTokenRequest(BaseModel): - """ - V1GetAuthTokenRequest - """ # noqa: E501 - grant_type: StrictStr = Field(description="Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`.") - assertion: StrictStr = Field(description="User-signed JWT token that contains the following fields:
  • iss: Issuer of the JWT.
  • key: Unique identifier for the key.
  • aud: Recipient the JWT is intended for.
  • exp: Time the JWT expires.
  • sub: Subject of the JWT.
  • ctx: (Optional) Value for Context-aware authorization.
") - subject_token: Optional[StrictStr] = Field(default=None, description="Subject token.") - subject_token_type: Optional[StrictStr] = Field(default=None, description="Subject token type.") - requested_token_use: Optional[StrictStr] = Field(default=None, description="Token use type. Either `delegation` or `impersonation`.") - scope: Optional[StrictStr] = Field(default=None, description="Subset of available roles to associate with the requested token. Uses the format \"role:\\ role:\\\".") - __properties: ClassVar[List[str]] = ["grant_type", "assertion", "subject_token", "subject_token_type", "requested_token_use", "scope"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1GetAuthTokenRequest from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1GetAuthTokenRequest from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "grant_type": obj.get("grant_type"), - "assertion": obj.get("assertion"), - "subject_token": obj.get("subject_token"), - "subject_token_type": obj.get("subject_token_type"), - "requested_token_use": obj.get("requested_token_use"), - "scope": obj.get("scope") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_get_auth_token_response.py b/skyflow/generated/rest/models/v1_get_auth_token_response.py deleted file mode 100644 index c3fccac2..00000000 --- a/skyflow/generated/rest/models/v1_get_auth_token_response.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Management API - - # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
  • X-SKYFLOW-ACCOUNT-ID: Your Skyflow account ID. Example: X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1GetAuthTokenResponse(BaseModel): - """ - V1GetAuthTokenResponse - """ # noqa: E501 - access_token: Optional[StrictStr] = Field(default=None, description="AccessToken.", alias="accessToken") - token_type: Optional[StrictStr] = Field(default=None, description="TokenType : Bearer.", alias="tokenType") - __properties: ClassVar[List[str]] = ["accessToken", "tokenType"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1GetAuthTokenResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1GetAuthTokenResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "accessToken": obj.get("accessToken"), - "tokenType": obj.get("tokenType") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_get_file_scan_status_response.py b/skyflow/generated/rest/models/v1_get_file_scan_status_response.py deleted file mode 100644 index 78d83d19..00000000 --- a/skyflow/generated/rest/models/v1_get_file_scan_status_response.py +++ /dev/null @@ -1,89 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus -from typing import Optional, Set -from typing_extensions import Self - -class V1GetFileScanStatusResponse(BaseModel): - """ - V1GetFileScanStatusResponse - """ # noqa: E501 - av_scan_status: Optional[V1FileAVScanStatus] = V1FileAVScanStatus.SCAN_NONE - __properties: ClassVar[List[str]] = ["av_scan_status"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1GetFileScanStatusResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1GetFileScanStatusResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "av_scan_status": obj.get("av_scan_status") if obj.get("av_scan_status") is not None else V1FileAVScanStatus.NONE - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_get_query_response.py b/skyflow/generated/rest/models/v1_get_query_response.py deleted file mode 100644 index 3f7dd870..00000000 --- a/skyflow/generated/rest/models/v1_get_query_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_field_records import V1FieldRecords -from typing import Optional, Set -from typing_extensions import Self - -class V1GetQueryResponse(BaseModel): - """ - V1GetQueryResponse - """ # noqa: E501 - records: Optional[List[V1FieldRecords]] = Field(default=None, description="Records returned by the query.") - __properties: ClassVar[List[str]] = ["records"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1GetQueryResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1GetQueryResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_insert_record_response.py b/skyflow/generated/rest/models/v1_insert_record_response.py deleted file mode 100644 index 142f1304..00000000 --- a/skyflow/generated/rest/models/v1_insert_record_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties -from typing import Optional, Set -from typing_extensions import Self - -class V1InsertRecordResponse(BaseModel): - """ - V1InsertRecordResponse - """ # noqa: E501 - records: Optional[List[V1RecordMetaProperties]] = Field(default=None, description="Identifiers for the inserted records.") - __properties: ClassVar[List[str]] = ["records"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1InsertRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1InsertRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1RecordMetaProperties.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_member_type.py b/skyflow/generated/rest/models/v1_member_type.py deleted file mode 100644 index 60009732..00000000 --- a/skyflow/generated/rest/models/v1_member_type.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import json -from enum import Enum -from typing_extensions import Self - - -class V1MemberType(str, Enum): - """ - Type of the member. - """ - - """ - allowed enum values - """ - NONE = 'NONE' - USER = 'USER' - SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Create an instance of V1MemberType from a JSON string""" - return cls(json.loads(json_str)) - - diff --git a/skyflow/generated/rest/models/v1_record_meta_properties.py b/skyflow/generated/rest/models/v1_record_meta_properties.py deleted file mode 100644 index add596f2..00000000 --- a/skyflow/generated/rest/models/v1_record_meta_properties.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1RecordMetaProperties(BaseModel): - """ - V1RecordMetaProperties - """ # noqa: E501 - skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the inserted record.") - tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") - __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1RecordMetaProperties from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1RecordMetaProperties from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "skyflow_id": obj.get("skyflow_id"), - "tokens": obj.get("tokens") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_tokenize_payload.py b/skyflow/generated/rest/models/v1_tokenize_payload.py deleted file mode 100644 index 8a275f2b..00000000 --- a/skyflow/generated/rest/models/v1_tokenize_payload.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest -from typing import Optional, Set -from typing_extensions import Self - -class V1TokenizePayload(BaseModel): - """ - V1TokenizePayload - """ # noqa: E501 - tokenization_parameters: Optional[List[V1TokenizeRecordRequest]] = Field(default=None, description="Tokenization details.", alias="tokenizationParameters") - __properties: ClassVar[List[str]] = ["tokenizationParameters"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1TokenizePayload from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in tokenization_parameters (list) - _items = [] - if self.tokenization_parameters: - for _item_tokenization_parameters in self.tokenization_parameters: - if _item_tokenization_parameters: - _items.append(_item_tokenization_parameters.to_dict()) - _dict['tokenizationParameters'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1TokenizePayload from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "tokenizationParameters": [V1TokenizeRecordRequest.from_dict(_item) for _item in obj["tokenizationParameters"]] if obj.get("tokenizationParameters") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_tokenize_record_request.py b/skyflow/generated/rest/models/v1_tokenize_record_request.py deleted file mode 100644 index e69e1e93..00000000 --- a/skyflow/generated/rest/models/v1_tokenize_record_request.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1TokenizeRecordRequest(BaseModel): - """ - V1TokenizeRecordRequest - """ # noqa: E501 - value: Optional[StrictStr] = Field(default=None, description="Existing value to return a token for.") - column_group: Optional[StrictStr] = Field(default=None, description="Name of the column group that the value belongs to.", alias="columnGroup") - __properties: ClassVar[List[str]] = ["value", "columnGroup"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1TokenizeRecordRequest from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1TokenizeRecordRequest from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "value": obj.get("value"), - "columnGroup": obj.get("columnGroup") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_tokenize_record_response.py b/skyflow/generated/rest/models/v1_tokenize_record_response.py deleted file mode 100644 index 24ac8311..00000000 --- a/skyflow/generated/rest/models/v1_tokenize_record_response.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1TokenizeRecordResponse(BaseModel): - """ - V1TokenizeRecordResponse - """ # noqa: E501 - token: Optional[StrictStr] = Field(default=None, description="Token corresponding to a value.") - __properties: ClassVar[List[str]] = ["token"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1TokenizeRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1TokenizeRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "token": obj.get("token") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_tokenize_response.py b/skyflow/generated/rest/models/v1_tokenize_response.py deleted file mode 100644 index 4847bae5..00000000 --- a/skyflow/generated/rest/models/v1_tokenize_response.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse -from typing import Optional, Set -from typing_extensions import Self - -class V1TokenizeResponse(BaseModel): - """ - V1TokenizeResponse - """ # noqa: E501 - records: Optional[List[V1TokenizeRecordResponse]] = Field(default=None, description="Tokens corresponding to the specified values.") - __properties: ClassVar[List[str]] = ["records"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1TokenizeResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of each item in records (list) - _items = [] - if self.records: - for _item_records in self.records: - if _item_records: - _items.append(_item_records.to_dict()) - _dict['records'] = _items - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1TokenizeResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "records": [V1TokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_update_record_response.py b/skyflow/generated/rest/models/v1_update_record_response.py deleted file mode 100644 index 0d66a403..00000000 --- a/skyflow/generated/rest/models/v1_update_record_response.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1UpdateRecordResponse(BaseModel): - """ - V1UpdateRecordResponse - """ # noqa: E501 - skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the updated record.") - tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") - __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1UpdateRecordResponse from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1UpdateRecordResponse from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "skyflow_id": obj.get("skyflow_id"), - "tokens": obj.get("tokens") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_vault_field_mapping.py b/skyflow/generated/rest/models/v1_vault_field_mapping.py deleted file mode 100644 index b00c92e5..00000000 --- a/skyflow/generated/rest/models/v1_vault_field_mapping.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from typing import Optional, Set -from typing_extensions import Self - -class V1VaultFieldMapping(BaseModel): - """ - Mapping of the fields in the vault to the fields to use for the lookup. - """ # noqa: E501 - card_number: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number.") - card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number suffix.") - card_expiry: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the expiry date.") - __properties: ClassVar[List[str]] = ["card_number", "card_last_four_digits", "card_expiry"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1VaultFieldMapping from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1VaultFieldMapping from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "card_number": obj.get("card_number"), - "card_last_four_digits": obj.get("card_last_four_digits"), - "card_expiry": obj.get("card_expiry") - }) - return _obj - - diff --git a/skyflow/generated/rest/models/v1_vault_schema_config.py b/skyflow/generated/rest/models/v1_vault_schema_config.py deleted file mode 100644 index e57e21ff..00000000 --- a/skyflow/generated/rest/models/v1_vault_schema_config.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Authorization: A Bearer Token. See API Authentication. Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -from __future__ import annotations -import pprint -import re # noqa: F401 -import json - -from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List, Optional -from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping -from typing import Optional, Set -from typing_extensions import Self - -class V1VaultSchemaConfig(BaseModel): - """ - Details of the vault that stores additional card details. - """ # noqa: E501 - id: Optional[StrictStr] = Field(default=None, description="ID of the vault that stores card details.") - table_name: Optional[StrictStr] = Field(default=None, description="Name of the table that stores card details.") - mapping: Optional[V1VaultFieldMapping] = None - __properties: ClassVar[List[str]] = ["id", "table_name", "mapping"] - - model_config = ConfigDict( - populate_by_name=True, - validate_assignment=True, - protected_namespaces=(), - ) - - - def to_str(self) -> str: - """Returns the string representation of the model using alias""" - return pprint.pformat(self.model_dump(by_alias=True)) - - def to_json(self) -> str: - """Returns the JSON representation of the model using alias""" - # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of V1VaultSchemaConfig from a JSON string""" - return cls.from_dict(json.loads(json_str)) - - def to_dict(self) -> Dict[str, Any]: - """Return the dictionary representation of the model using alias. - - This has the following differences from calling pydantic's - `self.model_dump(by_alias=True)`: - - * `None` is only added to the output dict for nullable fields that - were set at model initialization. Other fields with value `None` - are ignored. - """ - excluded_fields: Set[str] = set([ - ]) - - _dict = self.model_dump( - by_alias=True, - exclude=excluded_fields, - exclude_none=True, - ) - # override the default output from pydantic by calling `to_dict()` of mapping - if self.mapping: - _dict['mapping'] = self.mapping.to_dict() - return _dict - - @classmethod - def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of V1VaultSchemaConfig from a dict""" - if obj is None: - return None - - if not isinstance(obj, dict): - return cls.model_validate(obj) - - _obj = cls.model_validate({ - "id": obj.get("id"), - "table_name": obj.get("table_name"), - "mapping": V1VaultFieldMapping.from_dict(obj["mapping"]) if obj.get("mapping") is not None else None - }) - return _obj - - diff --git a/skyflow/generated/rest/query/__init__.py b/skyflow/generated/rest/query/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/skyflow/generated/rest/query/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/skyflow/generated/rest/query/client.py b/skyflow/generated/rest/query/client.py new file mode 100644 index 00000000..cf3ca319 --- /dev/null +++ b/skyflow/generated/rest/query/client.py @@ -0,0 +1,181 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.v_1_get_query_response import V1GetQueryResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class QueryClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def query_service_execute_query( + self, + vault_id: str, + *, + query: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetQueryResponse: + """ + Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
    • + + Parameters + ---------- + vault_id : str + ID of the vault. + + query : typing.Optional[str] + The SQL query to execute.

      Supported commands:
      • SELECT
      Supported operators:
      • >
      • <
      • =
      • AND
      • OR
      • NOT
      • LIKE
      • ILIKE
      • NULL
      • NOT NULL
      Supported keywords:
      • FROM
      • JOIN
      • INNER JOIN
      • LEFT OUTER JOIN
      • LEFT JOIN
      • RIGHT OUTER JOIN
      • RIGHT JOIN
      • FULL OUTER JOIN
      • FULL JOIN
      • OFFSET
      • LIMIT
      • WHERE
      Supported functions:
      • AVG()
      • SUM()
      • COUNT()
      • MIN()
      • MAX()
      • REDACTION()
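      As an editorial illustration of how the keywords listed above combine, a query
      string passed in the `query` parameter might look like the following (the
      `persons` table and `name` column are illustrative placeholders, not values
      required by this API):
      • select name from persons where name like 'A%' limit 25 offset 25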
      + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1GetQueryResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.query.query_service_execute_query( + vault_id="vaultID", + query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/query", + method="POST", + json={ + "query": query, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetQueryResponse, + parse_obj_as( + type_=V1GetQueryResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncQueryClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def query_service_execute_query( + self, + vault_id: str, + *, + query: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetQueryResponse: + """ + Returns records for a valid SQL query. This endpoint
      • Can return redacted record values.
      • Supports only the SELECT command.
      • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
      • Can't modify the vault or perform transactions.
      • Can't return tokens.
      • Can't return file download or render URLs.
      • Doesn't support the WHERE keyword with columns using transient tokenization.
      • Doesn't support `?` conditional for columns with column-level encryption disabled.
        • + + Parameters + ---------- + vault_id : str + ID of the vault. + + query : typing.Optional[str] + The SQL query to execute.

          Supported commands:
          • SELECT
          Supported operators:
          • >
          • <
          • =
          • AND
          • OR
          • NOT
          • LIKE
          • ILIKE
          • NULL
          • NOT NULL
          Supported keywords:
          • FROM
          • JOIN
          • INNER JOIN
          • LEFT OUTER JOIN
          • LEFT JOIN
          • RIGHT OUTER JOIN
          • RIGHT JOIN
          • FULL OUTER JOIN
          • FULL JOIN
          • OFFSET
          • LIMIT
          • WHERE
          Supported functions:
          • AVG()
          • SUM()
          • COUNT()
          • MIN()
          • MAX()
          • REDACTION()
          + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1GetQueryResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.query.query_service_execute_query( + vault_id="vaultID", + query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/query", + method="POST", + json={ + "query": query, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetQueryResponse, + parse_obj_as( + type_=V1GetQueryResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/records/__init__.py b/skyflow/generated/rest/records/__init__.py new file mode 100644 index 00000000..b144d479 --- /dev/null +++ b/skyflow/generated/rest/records/__init__.py @@ -0,0 +1,13 @@ +# This file was auto-generated by Fern from our API Definition. + +from .types import ( + RecordServiceBulkGetRecordRequestOrderBy, + RecordServiceBulkGetRecordRequestRedaction, + RecordServiceGetRecordRequestRedaction, +) + +__all__ = [ + "RecordServiceBulkGetRecordRequestOrderBy", + "RecordServiceBulkGetRecordRequestRedaction", + "RecordServiceGetRecordRequestRedaction", +] diff --git a/skyflow/generated/rest/records/client.py b/skyflow/generated/rest/records/client.py new file mode 100644 index 00000000..d73e0da0 --- /dev/null +++ b/skyflow/generated/rest/records/client.py @@ -0,0 +1,1978 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from ..core.client_wrapper import SyncClientWrapper +from ..types.v_1_batch_record import V1BatchRecord +from ..types.v_1_byot import V1Byot +from ..core.request_options import RequestOptions +from ..types.v_1_batch_operation_response import V1BatchOperationResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.pydantic_utilities import parse_obj_as +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from .types.record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction +from .types.record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy +from ..types.v_1_bulk_get_record_response import V1BulkGetRecordResponse +from ..types.v_1_field_records import V1FieldRecords +from ..types.v_1_insert_record_response import V1InsertRecordResponse +from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from .types.record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction +from ..types.v_1_update_record_response import V1UpdateRecordResponse +from ..types.v_1_delete_record_response import V1DeleteRecordResponse +from .. import core +from ..types.v_1_delete_file_response import V1DeleteFileResponse +from ..types.v_1_get_file_scan_status_response import V1GetFileScanStatusResponse +from ..core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RecordsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def record_service_batch_operation( + self, + vault_id: str, + *, + records: typing.Optional[typing.Sequence[V1BatchRecord]] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BatchOperationResponse: + """ + Performs multiple record operations in a single transaction. + + Parameters + ---------- + vault_id : str + ID of the vault. + + records : typing.Optional[typing.Sequence[V1BatchRecord]] + Record operations to perform. + + continue_on_error : typing.Optional[bool] + Continue performing operations on partial errors. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BatchOperationResponse + A successful response. 
+ + Examples + -------- + from skyflow import Skyflow, V1BatchRecord + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_batch_operation( + vault_id="vaultID", + records=[ + V1BatchRecord( + fields={ + "drivers_license_number": "89867453", + "name": "Connor", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + table_name="persons", + method="POST", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + download_url=False, + upsert="drivers_license_number", + ), + V1BatchRecord( + table_name="persons", + method="GET", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", + download_url=True, + ), + ], + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" + ), + "continueOnError": continue_on_error, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BatchOperationResponse, + parse_obj_as( + type_=V1BatchOperationResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_bulk_get_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + redaction: typing.Optional[RecordServiceBulkGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + offset: typing.Optional[str] = None, + limit: typing.Optional[str] = None, + download_url: typing.Optional[bool] = None, + column_name: typing.Optional[str] = None, + column_values: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + order_by: typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BulkGetRecordResponse: + """ + Gets the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table that contains the records. + + skyflow_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]] + `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

          If not specified, returns the first 25 records in the table. + + redaction : typing.Optional[RecordServiceBulkGetRecordRequestRedaction] + Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + + offset : typing.Optional[str] + Record position at which to start receiving data. + + limit : typing.Optional[str] + Number of records to return. Maximum 25. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + column_name : typing.Optional[str] + Name of the column. It must be configured as unique in the schema. If you provide either a column name or column values, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + column_values : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

          `column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + order_by : typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] + Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BulkGetRecordResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_bulk_get_record( + vault_id="vaultID", + object_name="objectName", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="GET", + params={ + "skyflow_ids": skyflow_ids, + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "offset": offset, + "limit": limit, + "downloadURL": download_url, + "column_name": column_name, + "column_values": column_values, + "order_by": order_by, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BulkGetRecordResponse, + parse_obj_as( + type_=V1BulkGetRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_insert_record( + self, + vault_id: str, + object_name: str, + *, + records: typing.Optional[typing.Sequence[V1FieldRecords]] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + upsert: typing.Optional[str] = OMIT, + homogeneous: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1InsertRecordResponse: + """ + Inserts a record in the specified table.

          The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

          Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + records : typing.Optional[typing.Sequence[V1FieldRecords]] + Record values and tokens. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + upsert : typing.Optional[str] + Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.

          When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed. + + homogeneous : typing.Optional[bool] + If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1InsertRecordResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow, V1FieldRecords + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_insert_record( + vault_id="vaultID", + object_name="objectName", + records=[ + V1FieldRecords( + fields={ + "drivers_license_number": "13456789", + "name": "John", + "phone_number": "1236784563", + "ssn": "123-45-6789", + }, + ), + V1FieldRecords( + fields={ + "drivers_license_number": "98765432", + "name": "James", + "phone_number": "9876543215", + "ssn": "345-45-9876", + }, + ), + ], + tokenization=True, + upsert="drivers_license_number", + homogeneous=False, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" + ), + "tokenization": tokenization, + "upsert": upsert, + "homogeneous": homogeneous, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1InsertRecordResponse, + parse_obj_as( + type_=V1InsertRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_bulk_delete_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BulkDeleteRecordResponse: + """ + Deletes the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + skyflow_ids : typing.Optional[typing.Sequence[str]] + `skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BulkDeleteRecordResponse + A successful response. 
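A minimal sketch of the wildcard form noted in the `skyflow_ids` description above; passing `"*"` deletes every record in the table, so use it with care:

from skyflow import Skyflow

client = Skyflow(
    token="YOUR_TOKEN",
)
# "*" matches all records in the table, per the parameter description above.
client.records.record_service_bulk_delete_record(
    vault_id="vaultID",
    object_name="objectName",
    skyflow_ids=["*"],
)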
+ + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_bulk_delete_record( + vault_id="vaultID", + object_name="objectName", + skyflow_ids=[ + "51782ea4-91a5-4430-a06d-f4b76efd3d2f", + "110ce08f-6059-4874-b1ae-7c6651d286ff", + ], + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="DELETE", + json={ + "skyflow_ids": skyflow_ids, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BulkDeleteRecordResponse, + parse_obj_as( + type_=V1BulkDeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_get_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + redaction: typing.Optional[RecordServiceGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + download_url: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1FieldRecords: + """ + Returns the specified record from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + redaction : typing.Optional[RecordServiceGetRecordRequestRedaction] + Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1FieldRecords + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_get_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="GET", + params={ + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "downloadURL": download_url, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1FieldRecords, + parse_obj_as( + type_=V1FieldRecords, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_update_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + record: typing.Optional[V1FieldRecords] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1UpdateRecordResponse: + """ + Updates the specified record in a table.

          When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

          The time-to-live (TTL) for a transient field resets when the field value is updated. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + record : typing.Optional[V1FieldRecords] + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1UpdateRecordResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow, V1FieldRecords + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_update_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + record=V1FieldRecords( + fields={ + "drivers_license_number": "89867453", + "name": "Steve Smith", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + ), + tokenization=True, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="PUT", + json={ + "record": convert_and_respect_annotation_metadata( + object_=record, annotation=V1FieldRecords, direction="write" + ), + "tokenization": tokenization, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_delete_record( + self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> V1DeleteRecordResponse: + """ + Deletes the specified record from a table.

          Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record to delete. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DeleteRecordResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_delete_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DeleteRecordResponse, + parse_obj_as( + type_=V1DeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def file_service_upload_file( + self, + vault_id: str, + object_name: str, + id: str, + *, + file_column_name: typing.Optional[core.File] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1UpdateRecordResponse: + """ + Uploads a file to the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + file_column_name : typing.Optional[core.File] + See core.File for more documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1UpdateRecordResponse + A successful response. 
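The generated example that follows omits the file payload. A minimal sketch of attaching one, assuming `core.File` accepts an open binary file handle (check `core.File` for the exact forms it supports); the file path is illustrative:

from skyflow import Skyflow

client = Skyflow(
    token="YOUR_TOKEN",
)
# Open the file in binary mode and pass it as the file_column_name payload.
with open("drivers_license.pdf", "rb") as license_file:
    client.records.file_service_upload_file(
        vault_id="vaultID",
        object_name="objectName",
        id="ID",
        file_column_name=license_file,
    )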
+ + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_upload_file( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", + method="POST", + data={}, + files={ + "fileColumnName": file_column_name, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def file_service_delete_file( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DeleteFileResponse: + """ + Deletes a file from the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DeleteFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_delete_file( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DeleteFileResponse, + parse_obj_as( + type_=V1DeleteFileResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def file_service_get_file_scan_status( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetFileScanStatusResponse: + """ + Returns the anti-virus scan status of a file. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + V1GetFileScanStatusResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_get_file_scan_status( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetFileScanStatusResponse, + parse_obj_as( + type_=V1GetFileScanStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncRecordsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def record_service_batch_operation( + self, + vault_id: str, + *, + records: typing.Optional[typing.Sequence[V1BatchRecord]] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BatchOperationResponse: + """ + Performs multiple record operations in a single transaction. + + Parameters + ---------- + vault_id : str + ID of the vault. + + records : typing.Optional[typing.Sequence[V1BatchRecord]] + Record operations to perform. + + continue_on_error : typing.Optional[bool] + Continue performing operations on partial errors. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BatchOperationResponse + A successful response. 
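Every operation in this client raises `NotFoundError` for 404 responses and `ApiError` for other non-2xx responses, as the handlers in this file show. A minimal sketch of catching them, assuming the classes resolve to `skyflow.generated.rest.errors.not_found_error` and `skyflow.generated.rest.core.api_error` (paths inferred from this file's relative imports):

import asyncio

from skyflow import AsyncSkyflow
from skyflow.generated.rest.core.api_error import ApiError
from skyflow.generated.rest.errors.not_found_error import NotFoundError

client = AsyncSkyflow(
    token="YOUR_TOKEN",
)


async def main() -> None:
    try:
        await client.records.record_service_batch_operation(
            vault_id="vaultID",
        )
    except NotFoundError:
        # 404: the vault or resource was not found.
        ...
    except ApiError as error:
        # Any other non-2xx response; the raise sites above attach the
        # response status code and body to the error.
        print(error.status_code, error.body)


asyncio.run(main())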
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow, V1BatchRecord + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_batch_operation( + vault_id="vaultID", + records=[ + V1BatchRecord( + fields={ + "drivers_license_number": "89867453", + "name": "Connor", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + table_name="persons", + method="POST", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + download_url=False, + upsert="drivers_license_number", + ), + V1BatchRecord( + table_name="persons", + method="GET", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", + download_url=True, + ), + ], + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" + ), + "continueOnError": continue_on_error, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BatchOperationResponse, + parse_obj_as( + type_=V1BatchOperationResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_bulk_get_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + redaction: typing.Optional[RecordServiceBulkGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + offset: typing.Optional[str] = None, + limit: typing.Optional[str] = None, + download_url: typing.Optional[bool] = None, + column_name: typing.Optional[str] = None, + column_values: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + order_by: typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BulkGetRecordResponse: + """ + Gets the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table that contains the records. + + skyflow_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]] + `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

          If not specified, returns the first 25 records in the table. + + redaction : typing.Optional[RecordServiceBulkGetRecordRequestRedaction] + Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + + offset : typing.Optional[str] + Record position at which to start receiving data. + + limit : typing.Optional[str] + Number of records to return. Maximum 25. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + column_name : typing.Optional[str] + Name of the column. It must be configured as unique in the schema. If you provide either a column name or column values, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + column_values : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

          `column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + order_by : typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] + Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BulkGetRecordResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_bulk_get_record( + vault_id="vaultID", + object_name="objectName", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="GET", + params={ + "skyflow_ids": skyflow_ids, + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "offset": offset, + "limit": limit, + "downloadURL": download_url, + "column_name": column_name, + "column_values": column_values, + "order_by": order_by, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BulkGetRecordResponse, + parse_obj_as( + type_=V1BulkGetRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_insert_record( + self, + vault_id: str, + object_name: str, + *, + records: typing.Optional[typing.Sequence[V1FieldRecords]] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + upsert: typing.Optional[str] = OMIT, + homogeneous: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1InsertRecordResponse: + """ + Inserts a record in the specified table.

          The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

          Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + records : typing.Optional[typing.Sequence[V1FieldRecords]] + Record values and tokens. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + upsert : typing.Optional[str] + Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.

          When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed. + + homogeneous : typing.Optional[bool] + If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1InsertRecordResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow, V1FieldRecords + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_insert_record( + vault_id="vaultID", + object_name="objectName", + records=[ + V1FieldRecords( + fields={ + "drivers_license_number": "13456789", + "name": "John", + "phone_number": "1236784563", + "ssn": "123-45-6789", + }, + ), + V1FieldRecords( + fields={ + "drivers_license_number": "98765432", + "name": "James", + "phone_number": "9876543215", + "ssn": "345-45-9876", + }, + ), + ], + tokenization=True, + upsert="drivers_license_number", + homogeneous=False, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" + ), + "tokenization": tokenization, + "upsert": upsert, + "homogeneous": homogeneous, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1InsertRecordResponse, + parse_obj_as( + type_=V1InsertRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_bulk_delete_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1BulkDeleteRecordResponse: + """ + Deletes the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + skyflow_ids : typing.Optional[typing.Sequence[str]] + `skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1BulkDeleteRecordResponse + A successful response. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_bulk_delete_record( + vault_id="vaultID", + object_name="objectName", + skyflow_ids=[ + "51782ea4-91a5-4430-a06d-f4b76efd3d2f", + "110ce08f-6059-4874-b1ae-7c6651d286ff", + ], + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="DELETE", + json={ + "skyflow_ids": skyflow_ids, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1BulkDeleteRecordResponse, + parse_obj_as( + type_=V1BulkDeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_get_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + redaction: typing.Optional[RecordServiceGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + download_url: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1FieldRecords: + """ + Returns the specified record from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + redaction : typing.Optional[RecordServiceGetRecordRequestRedaction] + Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1FieldRecords + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_get_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="GET", + params={ + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "downloadURL": download_url, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1FieldRecords, + parse_obj_as( + type_=V1FieldRecords, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_update_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + record: typing.Optional[V1FieldRecords] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1UpdateRecordResponse: + """ + Updates the specified record in a table.

          When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

          The time-to-live (TTL) for a transient field resets when the field value is updated. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + record : typing.Optional[V1FieldRecords] + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1UpdateRecordResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow, V1FieldRecords + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_update_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + record=V1FieldRecords( + fields={ + "drivers_license_number": "89867453", + "name": "Steve Smith", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + ), + tokenization=True, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="PUT", + json={ + "record": convert_and_respect_annotation_metadata( + object_=record, annotation=V1FieldRecords, direction="write" + ), + "tokenization": tokenization, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_delete_record( + self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> V1DeleteRecordResponse: + """ + Deletes the specified record from a table.

          Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record to delete. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DeleteRecordResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.record_service_delete_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DeleteRecordResponse, + parse_obj_as( + type_=V1DeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def file_service_upload_file( + self, + vault_id: str, + object_name: str, + id: str, + *, + file_column_name: typing.Optional[core.File] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1UpdateRecordResponse: + """ + Uploads a file to the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + file_column_name : typing.Optional[core.File] + See core.File for more documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1UpdateRecordResponse + A successful response. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.file_service_upload_file( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", + method="POST", + data={}, + files={ + "fileColumnName": file_column_name, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def file_service_delete_file( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DeleteFileResponse: + """ + Deletes a file from the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DeleteFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.file_service_delete_file( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DeleteFileResponse, + parse_obj_as( + type_=V1DeleteFileResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def file_service_get_file_scan_status( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1GetFileScanStatusResponse: + """ + Returns the anti-virus scan status of a file. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. 
+ + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1GetFileScanStatusResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.file_service_get_file_scan_status( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1GetFileScanStatusResponse, + parse_obj_as( + type_=V1GetFileScanStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/records/types/__init__.py b/skyflow/generated/rest/records/types/__init__.py new file mode 100644 index 00000000..9e9ce24e --- /dev/null +++ b/skyflow/generated/rest/records/types/__init__.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +from .record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy +from .record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction +from .record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction + +__all__ = [ + "RecordServiceBulkGetRecordRequestOrderBy", + "RecordServiceBulkGetRecordRequestRedaction", + "RecordServiceGetRecordRequestRedaction", +] diff --git a/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_order_by.py b/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_order_by.py new file mode 100644 index 00000000..41f10c0b --- /dev/null +++ b/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_order_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +RecordServiceBulkGetRecordRequestOrderBy = typing.Union[typing.Literal["ASCENDING", "DESCENDING", "NONE"], typing.Any] diff --git a/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_redaction.py b/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_redaction.py new file mode 100644 index 00000000..69bf6788 --- /dev/null +++ b/skyflow/generated/rest/records/types/record_service_bulk_get_record_request_redaction.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +RecordServiceBulkGetRecordRequestRedaction = typing.Union[ + typing.Literal["DEFAULT", "REDACTED", "MASKED", "PLAIN_TEXT"], typing.Any +] diff --git a/skyflow/generated/rest/records/types/record_service_get_record_request_redaction.py b/skyflow/generated/rest/records/types/record_service_get_record_request_redaction.py new file mode 100644 index 00000000..d7de8c1a --- /dev/null +++ b/skyflow/generated/rest/records/types/record_service_get_record_request_redaction.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +RecordServiceGetRecordRequestRedaction = typing.Union[ + typing.Literal["DEFAULT", "REDACTED", "MASKED", "PLAIN_TEXT"], typing.Any +] diff --git a/skyflow/generated/rest/rest.py b/skyflow/generated/rest/rest.py deleted file mode 100644 index 1aaefdb3..00000000 --- a/skyflow/generated/rest/rest.py +++ /dev/null @@ -1,258 +0,0 @@ -# coding: utf-8 - -""" - Skyflow Data API - - # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
          • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
          • Production: https://*identifier*.vault.skyflowapis.com
          When you make an API call, you need to add a header:
Header         | Value                                    | Example
Authorization  | A Bearer Token. See API Authentication.  | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
- - The version of the OpenAPI document: v1 - Contact: support@skyflow.com - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -import io -import json -import re -import ssl - -import urllib3 - -from skyflow.generated.rest.exceptions import ApiException, ApiValueError - -SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} -RESTResponseType = urllib3.HTTPResponse - - -def is_socks_proxy_url(url): - if url is None: - return False - split_section = url.split("://") - if len(split_section) < 2: - return False - else: - return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES - - -class RESTResponse(io.IOBase): - - def __init__(self, resp) -> None: - self.response = resp - self.status = resp.status - self.reason = resp.reason - self.data = None - - def read(self): - if self.data is None: - self.data = self.response.data - return self.data - - def getheaders(self): - """Returns a dictionary of the response headers.""" - return self.response.headers - - def getheader(self, name, default=None): - """Returns a given response header.""" - return self.response.headers.get(name, default) - - -class RESTClientObject: - - def __init__(self, configuration) -> None: - # urllib3.PoolManager will pass all kw parameters to connectionpool - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 - - # cert_reqs - if configuration.verify_ssl: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - pool_args = { - "cert_reqs": cert_reqs, - "ca_certs": configuration.ssl_ca_cert, - "cert_file": configuration.cert_file, - "key_file": configuration.key_file, - } - if configuration.assert_hostname is not None: - pool_args['assert_hostname'] = ( - configuration.assert_hostname - ) - - if configuration.retries is not None: - pool_args['retries'] = configuration.retries - - if configuration.tls_server_name: - pool_args['server_hostname'] = configuration.tls_server_name - - - if configuration.socket_options is not None: - pool_args['socket_options'] = configuration.socket_options - - if configuration.connection_pool_maxsize is not None: - pool_args['maxsize'] = configuration.connection_pool_maxsize - - # https pool manager - self.pool_manager: urllib3.PoolManager - - if configuration.proxy: - if is_socks_proxy_url(configuration.proxy): - from urllib3.contrib.socks import SOCKSProxyManager - pool_args["proxy_url"] = configuration.proxy - pool_args["headers"] = configuration.proxy_headers - self.pool_manager = SOCKSProxyManager(**pool_args) - else: - pool_args["proxy_url"] = configuration.proxy - pool_args["proxy_headers"] = configuration.proxy_headers - self.pool_manager = urllib3.ProxyManager(**pool_args) - else: - self.pool_manager = urllib3.PoolManager(**pool_args) - - def request( - self, - method, - url, - headers=None, - body=None, - post_params=None, - _request_timeout=None - ): - """Perform requests. 
- - :param method: http request method - :param url: http request url - :param headers: http request headers - :param body: request json body, for `application/json` - :param post_params: request post parameters, - `application/x-www-form-urlencoded` - and `multipart/form-data` - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - """ - method = method.upper() - assert method in [ - 'GET', - 'HEAD', - 'DELETE', - 'POST', - 'PUT', - 'PATCH', - 'OPTIONS' - ] - - if post_params and body: - raise ApiValueError( - "body parameter cannot be used with post_params parameter." - ) - - post_params = post_params or {} - headers = headers or {} - - timeout = None - if _request_timeout: - if isinstance(_request_timeout, (int, float)): - timeout = urllib3.Timeout(total=_request_timeout) - elif ( - isinstance(_request_timeout, tuple) - and len(_request_timeout) == 2 - ): - timeout = urllib3.Timeout( - connect=_request_timeout[0], - read=_request_timeout[1] - ) - - try: - # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: - - # no content type provided or payload is json - content_type = headers.get('Content-Type') - if ( - not content_type - or re.search('json', content_type, re.IGNORECASE) - ): - request_body = None - if body is not None: - request_body = json.dumps(body) - r = self.pool_manager.request( - method, - url, - body=request_body, - timeout=timeout, - headers=headers, - preload_content=False - ) - elif content_type == 'application/x-www-form-urlencoded': - r = self.pool_manager.request( - method, - url, - fields=post_params, - encode_multipart=False, - timeout=timeout, - headers=headers, - preload_content=False - ) - elif content_type == 'multipart/form-data': - # must del headers['Content-Type'], or the correct - # Content-Type which generated by urllib3 will be - # overwritten. - del headers['Content-Type'] - # Ensures that dict objects are serialized - post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params] - r = self.pool_manager.request( - method, - url, - fields=post_params, - encode_multipart=True, - timeout=timeout, - headers=headers, - preload_content=False - ) - # Pass a `string` parameter directly in the body to support - # other content types than JSON when `body` argument is - # provided in serialized form. - elif isinstance(body, str) or isinstance(body, bytes): - r = self.pool_manager.request( - method, - url, - body=body, - timeout=timeout, - headers=headers, - preload_content=False - ) - elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool): - request_body = "true" if body else "false" - r = self.pool_manager.request( - method, - url, - body=request_body, - preload_content=False, - timeout=timeout, - headers=headers) - else: - # Cannot generate the request from given parameters - msg = """Cannot prepare a request message for provided - arguments. 
Please check that your arguments match - declared content type.""" - raise ApiException(status=0, reason=msg) - # For `GET`, `HEAD` - else: - r = self.pool_manager.request( - method, - url, - fields={}, - timeout=timeout, - headers=headers, - preload_content=False - ) - except urllib3.exceptions.SSLError as e: - msg = "\n".join([type(e).__name__, str(e)]) - raise ApiException(status=0, reason=msg) - - return RESTResponse(r) diff --git a/skyflow/generated/rest/tokens/__init__.py b/skyflow/generated/rest/tokens/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/skyflow/generated/rest/tokens/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/skyflow/generated/rest/tokens/client.py b/skyflow/generated/rest/tokens/client.py new file mode 100644 index 00000000..641050fe --- /dev/null +++ b/skyflow/generated/rest/tokens/client.py @@ -0,0 +1,395 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from ..core.client_wrapper import SyncClientWrapper +from ..types.v_1_detokenize_record_request import V1DetokenizeRecordRequest +from ..core.request_options import RequestOptions +from ..types.v_1_detokenize_response import V1DetokenizeResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.pydantic_utilities import parse_obj_as +from ..errors.not_found_error import NotFoundError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..types.v_1_tokenize_record_request import V1TokenizeRecordRequest +from ..types.v_1_tokenize_response import V1TokenizeResponse +from ..core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class TokensClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def record_service_detokenize( + self, + vault_id: str, + *, + detokenization_parameters: typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] = OMIT, + download_url: typing.Optional[bool] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetokenizeResponse: + """ + Returns records that correspond to the specified tokens. + + Parameters + ---------- + vault_id : str + ID of the vault. + + detokenization_parameters : typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] + Detokenization details. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + continue_on_error : typing.Optional[bool] + If `true`, the detokenization request continues even if an error occurs. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetokenizeResponse + A successful response. 
+ + Examples + -------- + from skyflow import Skyflow, V1DetokenizeRecordRequest + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.tokens.record_service_detokenize( + vault_id="vaultID", + detokenization_parameters=[ + V1DetokenizeRecordRequest( + token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", + redaction="PLAIN_TEXT", + ), + V1DetokenizeRecordRequest( + token="05383487-fcae-42e5-a48e-5bd62a51af12", + redaction="DEFAULT", + ), + ], + download_url=False, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", + method="POST", + json={ + "detokenizationParameters": convert_and_respect_annotation_metadata( + object_=detokenization_parameters, + annotation=typing.Sequence[V1DetokenizeRecordRequest], + direction="write", + ), + "downloadURL": download_url, + "continueOnError": continue_on_error, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DetokenizeResponse, + parse_obj_as( + type_=V1DetokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def record_service_tokenize( + self, + vault_id: str, + *, + tokenization_parameters: typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1TokenizeResponse: + """ + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + Parameters + ---------- + vault_id : str + ID of the vault. + + tokenization_parameters : typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] + Tokenization details. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1TokenizeResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.tokens.record_service_tokenize( + vault_id="vaultID", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", + method="POST", + json={ + "tokenizationParameters": convert_and_respect_annotation_metadata( + object_=tokenization_parameters, + annotation=typing.Sequence[V1TokenizeRecordRequest], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1TokenizeResponse, + parse_obj_as( + type_=V1TokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncTokensClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def record_service_detokenize( + self, + vault_id: str, + *, + detokenization_parameters: typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] = OMIT, + download_url: typing.Optional[bool] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetokenizeResponse: + """ + Returns records that correspond to the specified tokens. + + Parameters + ---------- + vault_id : str + ID of the vault. + + detokenization_parameters : typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] + Detokenization details. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + continue_on_error : typing.Optional[bool] + If `true`, the detokenization request continues even if an error occurs. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetokenizeResponse + A successful response. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow, V1DetokenizeRecordRequest + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.tokens.record_service_detokenize( + vault_id="vaultID", + detokenization_parameters=[ + V1DetokenizeRecordRequest( + token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", + redaction="PLAIN_TEXT", + ), + V1DetokenizeRecordRequest( + token="05383487-fcae-42e5-a48e-5bd62a51af12", + redaction="DEFAULT", + ), + ], + download_url=False, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", + method="POST", + json={ + "detokenizationParameters": convert_and_respect_annotation_metadata( + object_=detokenization_parameters, + annotation=typing.Sequence[V1DetokenizeRecordRequest], + direction="write", + ), + "downloadURL": download_url, + "continueOnError": continue_on_error, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1DetokenizeResponse, + parse_obj_as( + type_=V1DetokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def record_service_tokenize( + self, + vault_id: str, + *, + tokenization_parameters: typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1TokenizeResponse: + """ + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + Parameters + ---------- + vault_id : str + ID of the vault. + + tokenization_parameters : typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] + Tokenization details. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1TokenizeResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.tokens.record_service_tokenize( + vault_id="vaultID", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", + method="POST", + json={ + "tokenizationParameters": convert_and_respect_annotation_metadata( + object_=tokenization_parameters, + annotation=typing.Sequence[V1TokenizeRecordRequest], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + V1TokenizeResponse, + parse_obj_as( + type_=V1TokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py new file mode 100644 index 00000000..d2112008 --- /dev/null +++ b/skyflow/generated/rest/types/__init__.py @@ -0,0 +1,91 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from .audit_event_audit_resource_type import AuditEventAuditResourceType +from .audit_event_context import AuditEventContext +from .audit_event_data import AuditEventData +from .audit_event_http_info import AuditEventHttpInfo +from .batch_record_method import BatchRecordMethod +from .context_access_type import ContextAccessType +from .context_auth_mode import ContextAuthMode +from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from .googlerpc_status import GooglerpcStatus +from .protobuf_any import ProtobufAny +from .redaction_enum_redaction import RedactionEnumRedaction +from .request_action_type import RequestActionType +from .v_1_audit_after_options import V1AuditAfterOptions +from .v_1_audit_event_response import V1AuditEventResponse +from .v_1_audit_response import V1AuditResponse +from .v_1_audit_response_event import V1AuditResponseEvent +from .v_1_audit_response_event_request import V1AuditResponseEventRequest +from .v_1_batch_operation_response import V1BatchOperationResponse +from .v_1_batch_record import V1BatchRecord +from .v_1_bin_list_response import V1BinListResponse +from .v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from .v_1_bulk_get_record_response import V1BulkGetRecordResponse +from .v_1_byot import V1Byot +from .v_1_card import V1Card +from .v_1_delete_file_response import V1DeleteFileResponse +from .v_1_delete_record_response import V1DeleteRecordResponse +from .v_1_detokenize_record_request import V1DetokenizeRecordRequest +from .v_1_detokenize_record_response import V1DetokenizeRecordResponse +from .v_1_detokenize_response import V1DetokenizeResponse +from .v_1_field_records import V1FieldRecords +from .v_1_file_av_scan_status import V1FileAvScanStatus +from .v_1_get_auth_token_response import V1GetAuthTokenResponse +from .v_1_get_file_scan_status_response import V1GetFileScanStatusResponse +from .v_1_get_query_response import V1GetQueryResponse +from .v_1_insert_record_response import V1InsertRecordResponse +from .v_1_member_type import V1MemberType +from .v_1_record_meta_properties import V1RecordMetaProperties +from .v_1_tokenize_record_request import V1TokenizeRecordRequest +from .v_1_tokenize_record_response import V1TokenizeRecordResponse +from .v_1_tokenize_response import V1TokenizeResponse +from .v_1_update_record_response import V1UpdateRecordResponse +from .v_1_vault_field_mapping import V1VaultFieldMapping +from .v_1_vault_schema_config import V1VaultSchemaConfig + +__all__ = [ + "AuditEventAuditResourceType", + "AuditEventContext", + "AuditEventData", + "AuditEventHttpInfo", + "BatchRecordMethod", + "ContextAccessType", + "ContextAuthMode", + "DetokenizeRecordResponseValueType", + "GooglerpcStatus", + "ProtobufAny", + "RedactionEnumRedaction", + "RequestActionType", + "V1AuditAfterOptions", + "V1AuditEventResponse", + "V1AuditResponse", + "V1AuditResponseEvent", + "V1AuditResponseEventRequest", + "V1BatchOperationResponse", + "V1BatchRecord", + "V1BinListResponse", + "V1BulkDeleteRecordResponse", + "V1BulkGetRecordResponse", + "V1Byot", + "V1Card", + "V1DeleteFileResponse", + "V1DeleteRecordResponse", + "V1DetokenizeRecordRequest", + "V1DetokenizeRecordResponse", + "V1DetokenizeResponse", + "V1FieldRecords", + "V1FileAvScanStatus", + "V1GetAuthTokenResponse", + "V1GetFileScanStatusResponse", + "V1GetQueryResponse", + "V1InsertRecordResponse", + "V1MemberType", + "V1RecordMetaProperties", + "V1TokenizeRecordRequest", + "V1TokenizeRecordResponse", + "V1TokenizeResponse", + 
"V1UpdateRecordResponse", + "V1VaultFieldMapping", + "V1VaultSchemaConfig", +] diff --git a/skyflow/generated/rest/types/audit_event_audit_resource_type.py b/skyflow/generated/rest/types/audit_event_audit_resource_type.py new file mode 100644 index 00000000..b6c6aa0c --- /dev/null +++ b/skyflow/generated/rest/types/audit_event_audit_resource_type.py @@ -0,0 +1,39 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AuditEventAuditResourceType = typing.Union[ + typing.Literal[ + "NONE_API", + "ACCOUNT", + "AUDIT", + "BASE_DATA_TYPE", + "FIELD_TEMPLATE", + "FILE", + "KEY", + "POLICY", + "PROTO_PARSE", + "RECORD", + "ROLE", + "RULE", + "SECRET", + "SERVICE_ACCOUNT", + "TOKEN", + "USER", + "VAULT", + "VAULT_TEMPLATE", + "WORKSPACE", + "TABLE", + "POLICY_TEMPLATE", + "MEMBER", + "TAG", + "CONNECTION", + "MIGRATION", + "SCHEDULED_JOB", + "JOB", + "COLUMN_NAME", + "NETWORK_TOKEN", + "SUBSCRIPTION", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/audit_event_context.py b/skyflow/generated/rest/types/audit_event_context.py new file mode 100644 index 00000000..178137ec --- /dev/null +++ b/skyflow/generated/rest/types/audit_event_context.py @@ -0,0 +1,90 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from .v_1_member_type import V1MemberType +from .context_access_type import ContextAccessType +from .context_auth_mode import ContextAuthMode +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AuditEventContext(UniversalBaseModel): + """ + Context for an audit event. + """ + + change_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="changeID")] = pydantic.Field( + default=None + ) + """ + ID for the audit event. + """ + + request_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="requestID")] = pydantic.Field( + default=None + ) + """ + ID for the request that caused the event. + """ + + trace_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="traceID")] = pydantic.Field( + default=None + ) + """ + ID for the request set by the service that received the request. + """ + + session_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="sessionID")] = pydantic.Field( + default=None + ) + """ + ID for the session in which the request was sent. + """ + + actor: typing.Optional[str] = pydantic.Field(default=None) + """ + Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + """ + + actor_type: typing_extensions.Annotated[typing.Optional[V1MemberType], FieldMetadata(alias="actorType")] = None + access_type: typing_extensions.Annotated[typing.Optional[ContextAccessType], FieldMetadata(alias="accessType")] = ( + None + ) + ip_address: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="ipAddress")] = pydantic.Field( + default=None + ) + """ + IP Address of the client that made the request. + """ + + origin: typing.Optional[str] = pydantic.Field(default=None) + """ + HTTP Origin request header (including scheme, hostname, and port) of the request. 
+ """ + + auth_mode: typing_extensions.Annotated[typing.Optional[ContextAuthMode], FieldMetadata(alias="authMode")] = None + jwt_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="jwtID")] = pydantic.Field( + default=None + ) + """ + ID of the JWT token. + """ + + bearer_token_context_id: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="bearerTokenContextID") + ] = pydantic.Field(default=None) + """ + Embedded User Context. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/audit_event_data.py b/skyflow/generated/rest/types/audit_event_data.py new file mode 100644 index 00000000..78385d17 --- /dev/null +++ b/skyflow/generated/rest/types/audit_event_data.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AuditEventData(UniversalBaseModel): + """ + Any Sensitive data that needs to be wrapped. + """ + + content: typing.Optional[str] = pydantic.Field(default=None) + """ + The entire body of the data requested or the query fired. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/audit_event_http_info.py b/skyflow/generated/rest/types/audit_event_http_info.py new file mode 100644 index 00000000..14df874b --- /dev/null +++ b/skyflow/generated/rest/types/audit_event_http_info.py @@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AuditEventHttpInfo(UniversalBaseModel): + uri: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="URI")] = pydantic.Field(default=None) + """ + The http URI that is used. + """ + + method: typing.Optional[str] = pydantic.Field(default=None) + """ + http method used. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/batch_record_method.py b/skyflow/generated/rest/types/batch_record_method.py new file mode 100644 index 00000000..e1882ba5 --- /dev/null +++ b/skyflow/generated/rest/types/batch_record_method.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +BatchRecordMethod = typing.Union[typing.Literal["NONE", "POST", "PUT", "GET", "DELETE"], typing.Any] diff --git a/skyflow/generated/rest/types/context_access_type.py b/skyflow/generated/rest/types/context_access_type.py new file mode 100644 index 00000000..056a10f4 --- /dev/null +++ b/skyflow/generated/rest/types/context_access_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +ContextAccessType = typing.Union[typing.Literal["ACCESS_NONE", "API", "SQL"], typing.Any] diff --git a/skyflow/generated/rest/types/context_auth_mode.py b/skyflow/generated/rest/types/context_auth_mode.py new file mode 100644 index 00000000..ad630625 --- /dev/null +++ b/skyflow/generated/rest/types/context_auth_mode.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ContextAuthMode = typing.Union[typing.Literal["AUTH_NONE", "OKTA_JWT", "SERVICE_ACCOUNT_JWT", "PAT_JWT"], typing.Any] diff --git a/skyflow/generated/rest/types/detokenize_record_response_value_type.py b/skyflow/generated/rest/types/detokenize_record_response_value_type.py new file mode 100644 index 00000000..3703064a --- /dev/null +++ b/skyflow/generated/rest/types/detokenize_record_response_value_type.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetokenizeRecordResponseValueType = typing.Union[ + typing.Literal["NONE", "STRING", "INTEGER", "FLOAT", "BOOL", "DATETIME", "JSON", "ARRAY", "DATE"], typing.Any +] diff --git a/skyflow/generated/rest/types/googlerpc_status.py b/skyflow/generated/rest/types/googlerpc_status.py new file mode 100644 index 00000000..aceede7e --- /dev/null +++ b/skyflow/generated/rest/types/googlerpc_status.py @@ -0,0 +1,22 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .protobuf_any import ProtobufAny +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic + + +class GooglerpcStatus(UniversalBaseModel): + code: typing.Optional[int] = None + message: typing.Optional[str] = None + details: typing.Optional[typing.List[ProtobufAny]] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/protobuf_any.py b/skyflow/generated/rest/types/protobuf_any.py new file mode 100644 index 00000000..9d141254 --- /dev/null +++ b/skyflow/generated/rest/types/protobuf_any.py @@ -0,0 +1,21 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic + + +class ProtobufAny(UniversalBaseModel): + type: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="@type")] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/redaction_enum_redaction.py b/skyflow/generated/rest/types/redaction_enum_redaction.py new file mode 100644 index 00000000..25529a7d --- /dev/null +++ b/skyflow/generated/rest/types/redaction_enum_redaction.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +RedactionEnumRedaction = typing.Union[typing.Literal["DEFAULT", "REDACTED", "MASKED", "PLAIN_TEXT"], typing.Any] diff --git a/skyflow/generated/rest/types/request_action_type.py b/skyflow/generated/rest/types/request_action_type.py new file mode 100644 index 00000000..c10fe1ce --- /dev/null +++ b/skyflow/generated/rest/types/request_action_type.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +RequestActionType = typing.Union[ + typing.Literal[ + "NONE", + "ASSIGN", + "CREATE", + "DELETE", + "EXECUTE", + "LIST", + "READ", + "UNASSIGN", + "UPDATE", + "VALIDATE", + "LOGIN", + "ROTATE", + "SCHEDULEROTATION", + "SCHEDULEROTATIONALERT", + "IMPORT", + "GETIMPORTPARAMETERS", + "PING", + "GETCLOUDPROVIDER", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/v_1_audit_after_options.py b/skyflow/generated/rest/types/v_1_audit_after_options.py new file mode 100644 index 00000000..0f078667 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audit_after_options.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1AuditAfterOptions(UniversalBaseModel): + timestamp: typing.Optional[str] = pydantic.Field(default=None) + """ + Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + """ + + change_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="changeID")] = pydantic.Field( + default=None + ) + """ + Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audit_event_response.py b/skyflow/generated/rest/types/v_1_audit_event_response.py new file mode 100644 index 00000000..2ff30533 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audit_event_response.py @@ -0,0 +1,38 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from .audit_event_data import AuditEventData +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1AuditEventResponse(UniversalBaseModel): + """ + Contains fields for defining Response Properties. + """ + + code: typing.Optional[int] = pydantic.Field(default=None) + """ + The status of the overall operation. + """ + + message: typing.Optional[str] = pydantic.Field(default=None) + """ + The status message for the overall operation. + """ + + data: typing.Optional[AuditEventData] = None + timestamp: typing.Optional[str] = pydantic.Field(default=None) + """ + time when this response is generated, use extention method to set it. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audit_response.py b/skyflow/generated/rest/types/v_1_audit_response.py new file mode 100644 index 00000000..617c1fd9 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audit_response.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_audit_response_event import V1AuditResponseEvent +import pydantic +import typing_extensions +from .v_1_audit_after_options import V1AuditAfterOptions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1AuditResponse(UniversalBaseModel): + event: typing.Optional[typing.List[V1AuditResponseEvent]] = pydantic.Field(default=None) + """ + Events matching the query. + """ + + next_ops: typing_extensions.Annotated[typing.Optional[V1AuditAfterOptions], FieldMetadata(alias="nextOps")] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audit_response_event.py b/skyflow/generated/rest/types/v_1_audit_response_event.py new file mode 100644 index 00000000..b623257e --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audit_response_event.py @@ -0,0 +1,50 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .audit_event_context import AuditEventContext +from .v_1_audit_response_event_request import V1AuditResponseEventRequest +from .v_1_audit_event_response import V1AuditEventResponse +import typing_extensions +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1AuditResponseEvent(UniversalBaseModel): + """ + Audit event details. + """ + + context: typing.Optional[AuditEventContext] = None + request: typing.Optional[V1AuditResponseEventRequest] = None + response: typing.Optional[V1AuditEventResponse] = None + parent_account_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="parentAccountID")] = ( + pydantic.Field(default=None) + ) + """ + Parent account ID of the account that made the request, if any. + """ + + account_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="accountID")] = pydantic.Field( + default=None + ) + """ + ID of the account that made the request. + """ + + resource_i_ds: typing_extensions.Annotated[ + typing.Optional[typing.List[str]], FieldMetadata(alias="resourceIDs") + ] = pydantic.Field(default=None) + """ + IDs for resources involved in the event. Presented in `{resourceType}/{resourceID}` format. For example, `VAULT/cd1d815aa09b4cbfbb803bd20349f202`. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audit_response_event_request.py b/skyflow/generated/rest/types/v_1_audit_response_event_request.py new file mode 100644 index 00000000..5eb9a709 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audit_response_event_request.py @@ -0,0 +1,67 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .audit_event_data import AuditEventData +import typing_extensions +from ..core.serialization import FieldMetadata +import pydantic +from .request_action_type import RequestActionType +from .audit_event_audit_resource_type import AuditEventAuditResourceType +from .audit_event_http_info import AuditEventHttpInfo +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1AuditResponseEventRequest(UniversalBaseModel): + """ + Contains fields for defining Request Properties. + """ + + data: typing.Optional[AuditEventData] = None + api_name: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="apiName")] = pydantic.Field( + default=None + ) + """ + API name. + """ + + workspace_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="workspaceID")] = ( + pydantic.Field(default=None) + ) + """ + The workspaceID (if any) of the request. + """ + + vault_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="vaultID")] = pydantic.Field( + default=None + ) + """ + The vaultID (if any) of the request. + """ + + tags: typing.Optional[typing.List[str]] = pydantic.Field(default=None) + """ + Tags associated with the event. To provide better search capabilities. Like login. + """ + + timestamp: typing.Optional[str] = pydantic.Field(default=None) + """ + time when this request is generated, use extention method to set it. + """ + + action_type: typing_extensions.Annotated[typing.Optional[RequestActionType], FieldMetadata(alias="actionType")] = ( + None + ) + resource_type: typing_extensions.Annotated[ + typing.Optional[AuditEventAuditResourceType], FieldMetadata(alias="resourceType") + ] = None + http_info: typing_extensions.Annotated[typing.Optional[AuditEventHttpInfo], FieldMetadata(alias="httpInfo")] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_batch_operation_response.py b/skyflow/generated/rest/types/v_1_batch_operation_response.py new file mode 100644 index 00000000..72643ce2 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_batch_operation_response.py @@ -0,0 +1,33 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1BatchOperationResponse(UniversalBaseModel): + vault_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="vaultID")] = pydantic.Field( + default=None + ) + """ + ID of the vault. 
+ """ + + responses: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field( + default=None + ) + """ + Responses in the same order as in the request. Responses have the same payload structure as their corresponding APIs:
  • `POST` returns an Insert Records response.
  • `PUT` returns an Update Record response.
  • `GET` returns a Get Record response.
  • `DELETE` returns a Delete Record response.
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_batch_record.py b/skyflow/generated/rest/types/v_1_batch_record.py new file mode 100644 index 00000000..7dca5fda --- /dev/null +++ b/skyflow/generated/rest/types/v_1_batch_record.py @@ -0,0 +1,69 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +import typing_extensions +from ..core.serialization import FieldMetadata +from .batch_record_method import BatchRecordMethod +from .redaction_enum_redaction import RedactionEnumRedaction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1BatchRecord(UniversalBaseModel): + fields: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Field and value key pairs. For example, `{'field_1':'value_1', 'field_2':'value_2'}`. Only valid when `method` is `POST` or `PUT`. + """ + + table_name: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="tableName")] = pydantic.Field( + default=None + ) + """ + Name of the table to perform the operation on. + """ + + method: typing.Optional[BatchRecordMethod] = None + batch_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="batchID")] = pydantic.Field( + default=None + ) + """ + ID to group operations by. Operations in the same group are executed sequentially. + """ + + redaction: typing.Optional[RedactionEnumRedaction] = None + tokenization: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + """ + + id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="ID")] = pydantic.Field(default=None) + """ + `skyflow_id` for the record. Only valid when `method` is `GET`, `DELETE`, or `PUT`. + """ + + download_url: typing_extensions.Annotated[typing.Optional[bool], FieldMetadata(alias="downloadURL")] = ( + pydantic.Field(default=None) + ) + """ + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + """ + + upsert: typing.Optional[str] = pydantic.Field(default=None) + """ + Column that stores primary keys for upsert operations. The column must be marked as unique in the vault schema. Only valid when `method` is `POST`. + """ + + tokens: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_bin_list_response.py b/skyflow/generated/rest/types/v_1_bin_list_response.py new file mode 100644 index 00000000..bd4f69b9 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_bin_list_response.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_card import V1Card +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1BinListResponse(UniversalBaseModel): + """ + Response to the Get BIN request. + """ + + cards_data: typing.Optional[typing.List[V1Card]] = pydantic.Field(default=None) + """ + Card metadata associated with the specified BIN. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py b/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py new file mode 100644 index 00000000..6d03bccd --- /dev/null +++ b/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1BulkDeleteRecordResponse(UniversalBaseModel): + record_id_response: typing_extensions.Annotated[ + typing.Optional[typing.List[str]], FieldMetadata(alias="RecordIDResponse") + ] = pydantic.Field(default=None) + """ + IDs for the deleted records, or `*` if all records were deleted. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_bulk_get_record_response.py b/skyflow/generated/rest/types/v_1_bulk_get_record_response.py new file mode 100644 index 00000000..7244bc7f --- /dev/null +++ b/skyflow/generated/rest/types/v_1_bulk_get_record_response.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_field_records import V1FieldRecords +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1BulkGetRecordResponse(UniversalBaseModel): + records: typing.Optional[typing.List[V1FieldRecords]] = pydantic.Field(default=None) + """ + The specified records. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_byot.py b/skyflow/generated/rest/types/v_1_byot.py new file mode 100644 index 00000000..3c03bdac --- /dev/null +++ b/skyflow/generated/rest/types/v_1_byot.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +V1Byot = typing.Union[typing.Literal["DISABLE", "ENABLE", "ENABLE_STRICT"], typing.Any] diff --git a/skyflow/generated/rest/types/v_1_card.py b/skyflow/generated/rest/types/v_1_card.py new file mode 100644 index 00000000..c5a641b1 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_card.py @@ -0,0 +1,68 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1Card(UniversalBaseModel): + """ + Card metadata of the requested BIN. + """ + + bin: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="BIN")] = pydantic.Field(default=None) + """ + BIN of the card. + """ + + issuer_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Name of the card issuer bank. + """ + + country_code: typing.Optional[str] = pydantic.Field(default=None) + """ + Country code of the card. + """ + + currency: typing.Optional[str] = pydantic.Field(default=None) + """ + Currency of the card. + """ + + card_type: typing.Optional[str] = pydantic.Field(default=None) + """ + Type of the card. + """ + + card_category: typing.Optional[str] = pydantic.Field(default=None) + """ + Category of the card. + """ + + card_scheme: typing.Optional[str] = pydantic.Field(default=None) + """ + Scheme of the card. + """ + + card_last_four_digits: typing.Optional[str] = pydantic.Field(default=None) + """ + Last four digits of the card number. + """ + + card_expiry: typing.Optional[str] = pydantic.Field(default=None) + """ + Expiry date of the card. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_delete_file_response.py b/skyflow/generated/rest/types/v_1_delete_file_response.py new file mode 100644 index 00000000..6e995cec --- /dev/null +++ b/skyflow/generated/rest/types/v_1_delete_file_response.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1DeleteFileResponse(UniversalBaseModel): + skyflow_id: typing.Optional[str] = pydantic.Field(default=None) + """ + ID of the record. + """ + + deleted: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, the file was deleted. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_delete_record_response.py b/skyflow/generated/rest/types/v_1_delete_record_response.py new file mode 100644 index 00000000..366cb30b --- /dev/null +++ b/skyflow/generated/rest/types/v_1_delete_record_response.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1DeleteRecordResponse(UniversalBaseModel): + skyflow_id: typing.Optional[str] = pydantic.Field(default=None) + """ + ID of the deleted record. + """ + + deleted: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, the record was deleted. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detokenize_record_request.py b/skyflow/generated/rest/types/v_1_detokenize_record_request.py new file mode 100644 index 00000000..b6e225c3 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detokenize_record_request.py @@ -0,0 +1,25 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from .redaction_enum_redaction import RedactionEnumRedaction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1DetokenizeRecordRequest(UniversalBaseModel): + token: typing.Optional[str] = pydantic.Field(default=None) + """ + Token that identifies the record to detokenize. + """ + + redaction: typing.Optional[RedactionEnumRedaction] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detokenize_record_response.py b/skyflow/generated/rest/types/v_1_detokenize_record_response.py new file mode 100644 index 00000000..bbc26aa0 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detokenize_record_response.py @@ -0,0 +1,38 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +import typing_extensions +from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1DetokenizeRecordResponse(UniversalBaseModel): + token: typing.Optional[str] = pydantic.Field(default=None) + """ + Token of the record. + """ + + value_type: typing_extensions.Annotated[ + typing.Optional[DetokenizeRecordResponseValueType], FieldMetadata(alias="valueType") + ] = None + value: typing.Optional[str] = pydantic.Field(default=None) + """ + Data corresponding to the token. + """ + + error: typing.Optional[str] = pydantic.Field(default=None) + """ + Error if token isn't found. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detokenize_response.py b/skyflow/generated/rest/types/v_1_detokenize_response.py new file mode 100644 index 00000000..63e97c84 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detokenize_response.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_detokenize_record_response import V1DetokenizeRecordResponse +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1DetokenizeResponse(UniversalBaseModel): + records: typing.Optional[typing.List[V1DetokenizeRecordResponse]] = pydantic.Field(default=None) + """ + Records corresponding to the specified tokens. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_field_records.py b/skyflow/generated/rest/types/v_1_field_records.py new file mode 100644 index 00000000..07a8bf58 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_field_records.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1FieldRecords(UniversalBaseModel): + """ + Record values and tokens. + """ + + fields: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Fields and values for the record. For example, `{'field_1':'value_1', 'field_2':'value_2'}`. + """ + + tokens: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_file_av_scan_status.py b/skyflow/generated/rest/types/v_1_file_av_scan_status.py new file mode 100644 index 00000000..78712507 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_file_av_scan_status.py @@ -0,0 +1,18 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +V1FileAvScanStatus = typing.Union[ + typing.Literal[ + "SCAN_NONE", + "SCAN_CLEAN", + "SCAN_INFECTED", + "SCAN_DELETED", + "SCAN_ERROR", + "SCAN_PENDING", + "SCAN_UNSCANNABLE", + "SCAN_FILE_NOT_FOUND", + "SCAN_INVALID", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/v_1_get_auth_token_response.py b/skyflow/generated/rest/types/v_1_get_auth_token_response.py new file mode 100644 index 00000000..d414ed7c --- /dev/null +++ b/skyflow/generated/rest/types/v_1_get_auth_token_response.py @@ -0,0 +1,33 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing_extensions +import typing +from ..core.serialization import FieldMetadata +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1GetAuthTokenResponse(UniversalBaseModel): + access_token: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="accessToken")] = ( + pydantic.Field(default=None) + ) + """ + AccessToken. + """ + + token_type: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="tokenType")] = pydantic.Field( + default=None + ) + """ + TokenType : Bearer. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py b/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py new file mode 100644 index 00000000..71349961 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_file_av_scan_status import V1FileAvScanStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic + + +class V1GetFileScanStatusResponse(UniversalBaseModel): + av_scan_status: typing.Optional[V1FileAvScanStatus] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_get_query_response.py b/skyflow/generated/rest/types/v_1_get_query_response.py new file mode 100644 index 00000000..778a517a --- /dev/null +++ b/skyflow/generated/rest/types/v_1_get_query_response.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_field_records import V1FieldRecords +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1GetQueryResponse(UniversalBaseModel): + records: typing.Optional[typing.List[V1FieldRecords]] = pydantic.Field(default=None) + """ + Records returned by the query. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_insert_record_response.py b/skyflow/generated/rest/types/v_1_insert_record_response.py new file mode 100644 index 00000000..a3344c92 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_insert_record_response.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_record_meta_properties import V1RecordMetaProperties +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1InsertRecordResponse(UniversalBaseModel): + records: typing.Optional[typing.List[V1RecordMetaProperties]] = pydantic.Field(default=None) + """ + Identifiers for the inserted records. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_member_type.py b/skyflow/generated/rest/types/v_1_member_type.py new file mode 100644 index 00000000..4f862413 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_member_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +V1MemberType = typing.Union[typing.Literal["NONE", "USER", "SERVICE_ACCOUNT"], typing.Any] diff --git a/skyflow/generated/rest/types/v_1_record_meta_properties.py b/skyflow/generated/rest/types/v_1_record_meta_properties.py new file mode 100644 index 00000000..a4eb95b7 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_record_meta_properties.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1RecordMetaProperties(UniversalBaseModel): + skyflow_id: typing.Optional[str] = pydantic.Field(default=None) + """ + ID of the inserted record. + """ + + tokens: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Tokens for the record. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_tokenize_record_request.py b/skyflow/generated/rest/types/v_1_tokenize_record_request.py new file mode 100644 index 00000000..9fba53a2 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_tokenize_record_request.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1TokenizeRecordRequest(UniversalBaseModel): + value: typing.Optional[str] = pydantic.Field(default=None) + """ + Existing value to return a token for. + """ + + column_group: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="columnGroup")] = ( + pydantic.Field(default=None) + ) + """ + Name of the column group that the value belongs to. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_tokenize_record_response.py b/skyflow/generated/rest/types/v_1_tokenize_record_response.py new file mode 100644 index 00000000..c105e9fc --- /dev/null +++ b/skyflow/generated/rest/types/v_1_tokenize_record_response.py @@ -0,0 +1,22 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1TokenizeRecordResponse(UniversalBaseModel): + token: typing.Optional[str] = pydantic.Field(default=None) + """ + Token corresponding to a value. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_tokenize_response.py b/skyflow/generated/rest/types/v_1_tokenize_response.py new file mode 100644 index 00000000..0e1886b4 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_tokenize_response.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from .v_1_tokenize_record_response import V1TokenizeRecordResponse +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1TokenizeResponse(UniversalBaseModel): + records: typing.Optional[typing.List[V1TokenizeRecordResponse]] = pydantic.Field(default=None) + """ + Tokens corresponding to the specified values. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_update_record_response.py b/skyflow/generated/rest/types/v_1_update_record_response.py new file mode 100644 index 00000000..be6da8fb --- /dev/null +++ b/skyflow/generated/rest/types/v_1_update_record_response.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1UpdateRecordResponse(UniversalBaseModel): + skyflow_id: typing.Optional[str] = pydantic.Field(default=None) + """ + ID of the updated record. + """ + + tokens: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Tokens for the record. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_vault_field_mapping.py b/skyflow/generated/rest/types/v_1_vault_field_mapping.py new file mode 100644 index 00000000..a567d639 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_vault_field_mapping.py @@ -0,0 +1,36 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1VaultFieldMapping(UniversalBaseModel): + """ + Mapping of the fields in the vault to the fields to use for the lookup. + """ + + card_number: typing.Optional[str] = pydantic.Field(default=None) + """ + Name of the column that stores the card number. + """ + + card_last_four_digits: typing.Optional[str] = pydantic.Field(default=None) + """ + Name of the column that stores the card number suffix. + """ + + card_expiry: typing.Optional[str] = pydantic.Field(default=None) + """ + Name of the column that stores the expiry date. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_vault_schema_config.py b/skyflow/generated/rest/types/v_1_vault_schema_config.py new file mode 100644 index 00000000..a3f3f0b6 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_vault_schema_config.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +import pydantic +from .v_1_vault_field_mapping import V1VaultFieldMapping +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class V1VaultSchemaConfig(UniversalBaseModel): + """ + Details of the vault that stores additional card details. + """ + + id: typing.Optional[str] = pydantic.Field(default=None) + """ + ID of the vault that stores card details. + """ + + table_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Name of the table that stores card details. + """ + + mapping: typing.Optional[V1VaultFieldMapping] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/version.py b/skyflow/generated/rest/version.py new file mode 100644 index 00000000..f8d02ff4 --- /dev/null +++ b/skyflow/generated/rest/version.py @@ -0,0 +1 @@ +__version__ = '2.0.0b1.dev0+3d4ee51' diff --git a/skyflow/service_account/_utils.py b/skyflow/service_account/_utils.py index 78617670..715716d8 100644 --- a/skyflow/service_account/_utils.py +++ b/skyflow/service_account/_utils.py @@ -3,7 +3,6 @@ import time import jwt from skyflow.error import SkyflowError -from skyflow.generated.rest.models import V1GetAuthTokenRequest from skyflow.service_account.client.auth_client import AuthClient from skyflow.utils.logger import log_info, log_error_log from skyflow.utils import get_base_url, format_scope, SkyflowMessages @@ -89,10 +88,9 @@ def get_service_account_token(credentials, options, logger): if options and "role_ids" in options: formatted_scope = format_scope(options.get("role_ids")) - request = V1GetAuthTokenRequest(assertion = signed_token, + response = auth_api.authentication_service_get_auth_token(assertion = signed_token, grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", scope=formatted_scope) - response = auth_api.authentication_service_get_auth_token(request) log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_SUCCESS.value, logger) return response.access_token, response.token_type diff --git a/skyflow/service_account/client/auth_client.py b/skyflow/service_account/client/auth_client.py index c1cc9cb2..948e05f3 100644 --- a/skyflow/service_account/client/auth_client.py +++ b/skyflow/service_account/client/auth_client.py @@ -1,18 +1,13 @@ -from skyflow.generated.rest import Configuration, ApiClient -from skyflow.generated.rest.api import AuthenticationApi - +from skyflow.generated.rest.client import Skyflow +from skyflow.utils.constants import OPTIONAL_TOKEN class AuthClient: def __init__(self, url): self.__url = url - self.__client_configuration = self.initialize_client_configuration() self.__api_client = self.initialize_api_client() - def initialize_client_configuration(self): - return 
Configuration(host=self.__url) - def initialize_api_client(self): - return ApiClient(self.__client_configuration) + return Skyflow(base_url=self.__url, token=OPTIONAL_TOKEN) def get_auth_api(self): - return AuthenticationApi(self.__api_client) \ No newline at end of file + return self.__api_client.authentication \ No newline at end of file diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 2261d3e6..8f035e93 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -13,8 +13,9 @@ from skyflow.error import SkyflowError from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse -from skyflow.utils.logger import log_error, log_error_log +from skyflow.utils.logger import log_error_log from . import SkyflowMessages, SDK_VERSION +from .constants import PROTOCOL from .enums import Env, ContentType, EnvUrls from skyflow.vault.data import InsertResponse, UpdateResponse, DeleteResponse, QueryResponse, GetResponse from .validations import validate_invoke_connection_params @@ -61,7 +62,7 @@ def get_vault_url(cluster_id, env,vault_id, logger = None): raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) base_url = EnvUrls[env.name].value - protocol = "https" if env != Env.PROD else "http" + protocol = PROTOCOL return f"{protocol}://{cluster_id}.{base_url}" diff --git a/skyflow/utils/constants.py b/skyflow/utils/constants.py new file mode 100644 index 00000000..fea57008 --- /dev/null +++ b/skyflow/utils/constants.py @@ -0,0 +1,2 @@ +OPTIONAL_TOKEN='token' +PROTOCOL='https' \ No newline at end of file diff --git a/skyflow/utils/enums/env.py b/skyflow/utils/enums/env.py index 862f8f8a..1f2f7f17 100644 --- a/skyflow/utils/enums/env.py +++ b/skyflow/utils/enums/env.py @@ -1,13 +1,13 @@ from enum import Enum class Env(Enum): - DEV = 'DEV', - SANDBOX = 'SANDBOX', + DEV = 'DEV' + SANDBOX = 'SANDBOX' PROD = 'PROD' STAGE = 'STAGE' class EnvUrls(Enum): - PROD = "vault.skyflowapis.com", - SANDBOX = "vault.skyflowapis-preview.com", + PROD = "vault.skyflowapis.com" + SANDBOX = "vault.skyflowapis-preview.com" DEV = "vault.skyflowapis.dev" STAGE = "vault.skyflowapis.tech" \ No newline at end of file diff --git a/skyflow/utils/enums/redaction_type.py b/skyflow/utils/enums/redaction_type.py index 85310048..1780e820 100644 --- a/skyflow/utils/enums/redaction_type.py +++ b/skyflow/utils/enums/redaction_type.py @@ -1,8 +1,7 @@ from enum import Enum -from skyflow.generated.rest import RedactionEnumREDACTION class RedactionType(Enum): - PLAIN_TEXT = RedactionEnumREDACTION.PLAIN_TEXT - MASKED = RedactionEnumREDACTION.MASKED - DEFAULT = RedactionEnumREDACTION.DEFAULT - REDACTED = RedactionEnumREDACTION.REDACTED + PLAIN_TEXT = 'PLAIN_TEXT' + MASKED = 'MASKED' + DEFAULT = 'DEFAULT' + REDACTED = 'REDACTED' diff --git a/skyflow/utils/enums/token_mode.py b/skyflow/utils/enums/token_mode.py index 650f9a96..a073b125 100644 --- a/skyflow/utils/enums/token_mode.py +++ b/skyflow/utils/enums/token_mode.py @@ -1,7 +1,6 @@ from enum import Enum -from skyflow.generated.rest import V1BYOT class TokenMode(Enum): - DISABLE = V1BYOT.DISABLE - ENABLE = V1BYOT.ENABLE - ENABLE_STRICT = V1BYOT.ENABLE_STRICT \ No newline at end of file + DISABLE = "DISABLE" + ENABLE = "ENABLE" + ENABLE_STRICT = "ENABLE_STRICT" \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index 
5b7827a9..93d10468 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -514,16 +514,20 @@ def validate_detokenize_request(logger, request): raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value, invalid_input_error_code) for item in request.data: - if 'token' not in item or 'redaction' not in item: - raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value(type(request.data)), invalid_input_error_code) + if 'token' not in item: + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value.format(type(request.data)), + invalid_input_error_code) + token = item.get('token') - redaction = item.get('redaction') + redaction = item.get('redaction', None) if not isinstance(token, str) or not token: - raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_TYPE.value.format("DETOKENIZE"), invalid_input_error_code) + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_TYPE.value.format("DETOKENIZE"), + invalid_input_error_code) - if not isinstance(redaction, RedactionType) or not redaction: - raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(redaction)), invalid_input_error_code) + if redaction is not None and not isinstance(redaction, RedactionType): + raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(redaction)), + invalid_input_error_code) def validate_tokenize_request(logger, request): parameters = request.values diff --git a/skyflow/vault/client/client.py b/skyflow/vault/client/client.py index 34a9374a..e3e543ae 100644 --- a/skyflow/vault/client/client.py +++ b/skyflow/vault/client/client.py @@ -1,5 +1,4 @@ -import json -from skyflow.generated.rest import Configuration, RecordsApi, ApiClient, TokensApi, QueryApi +from skyflow.generated.rest.client import Skyflow from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired from skyflow.utils import get_vault_url, get_credentials, SkyflowMessages from skyflow.utils.logger import log_info @@ -30,20 +29,19 @@ def initialize_client_configuration(self): self.__config.get("env"), self.__config.get("vault_id"), logger = self.__logger) - self.__client_configuration = Configuration(host=vault_url, access_token=token) - self.initialize_api_client(self.__client_configuration) + self.initialize_api_client(vault_url, token) - def initialize_api_client(self, config): - self.__api_client = ApiClient(config) + def initialize_api_client(self, vault_url, token): + self.__api_client = Skyflow(base_url=vault_url, token=token) def get_records_api(self): - return RecordsApi(self.__api_client) + return self.__api_client.records def get_tokens_api(self): - return TokensApi(self.__api_client) + return self.__api_client.tokens def get_query_api(self): - return QueryApi(self.__api_client) + return self.__api_client.query def get_vault_id(self): return self.__config.get("vault_id") diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index 9867443f..cabd82db 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -1,11 +1,9 @@ -from skyflow.generated.rest import V1FieldRecords, RecordServiceInsertRecordBody, V1DetokenizeRecordRequest, \ - V1DetokenizePayload, V1TokenizeRecordRequest, V1TokenizePayload, QueryServiceExecuteQueryBody, \ - RecordServiceBulkDeleteRecordBody, RecordServiceUpdateRecordBody, RecordServiceBatchOperationBody, V1BatchRecord, \ - BatchRecordMethod -from 
skyflow.generated.rest.exceptions import BadRequestException, UnauthorizedException, ForbiddenException +from skyflow.generated.rest import V1FieldRecords, V1BatchRecord, V1TokenizeRecordRequest, \ + V1DetokenizeRecordRequest from skyflow.utils import SkyflowMessages, parse_insert_response, \ handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, \ parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values +from skyflow.utils.enums import RequestMethod from skyflow.utils.logger import log_info, log_error_log from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request @@ -30,8 +28,6 @@ def __build_bulk_field_records(self, values, tokens=None): fields=value, tokens=token ) - if token is not None: - bulk_record.tokens = token bulk_record_list.append(bulk_record) return bulk_record_list @@ -42,7 +38,7 @@ def __build_batch_field_records(self, values, tokens, table_name, return_tokens, batch_record = V1BatchRecord( fields=value, table_name=table_name, - method=BatchRecordMethod.POST, + method=RequestMethod.POST.value, tokenization=return_tokens, upsert=upsert, tokens=token @@ -61,21 +57,11 @@ def __build_insert_body(self, request: InsertRequest): request.return_tokens, request.upsert ) - body = RecordServiceBatchOperationBody( - records=records_list, - continue_on_error=request.continue_on_error, - byot=request.token_mode.value - ) - return body + + return records_list else: records_list = self.__build_bulk_field_records(request.values, request.tokens) - return RecordServiceInsertRecordBody( - records=records_list, - tokenization=request.return_tokens, - upsert=request.upsert, - homogeneous=request.homogeneous, - byot=request.token_mode.value - ) + return records_list def insert(self, request: InsertRequest): log_info(SkyflowMessages.Info.VALIDATE_INSERT_REQUEST.value, self.__vault_client.get_logger()) @@ -87,26 +73,21 @@ def insert(self, request: InsertRequest): try: log_info(SkyflowMessages.Info.INSERT_TRIGGERED.value, self.__vault_client.get_logger()) - if request.continue_on_error: api_response = records_api.record_service_batch_operation(self.__vault_client.get_vault_id(), - insert_body) + records=insert_body, continue_on_error=request.continue_on_error, byot=request.token_mode.value) else: api_response = records_api.record_service_insert_record(self.__vault_client.get_vault_id(), - request.table_name, insert_body) + request.table_name, records=insert_body,tokenization= request.return_tokens, upsert=request.upsert, homogeneous=request.homogeneous, byot=request.token_mode.value) insert_response = parse_insert_response(api_response, request.continue_on_error) log_info(SkyflowMessages.Info.INSERT_SUCCESS.value, self.__vault_client.get_logger()) return insert_response - except BadRequestException as e: + except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.INSERT_RECORDS_REJECTED.value, self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def update(self, request: UpdateRequest): log_info(SkyflowMessages.Info.VALIDATE_UPDATE_REQUEST.value, self.__vault_client.get_logger()) @@ -115,7 +96,6 @@ def update(self, request: UpdateRequest): 
self.__initialize() field = {key: value for key, value in request.data.items() if key != "skyflow_id"} record = V1FieldRecords(fields=field, tokens = request.tokens) - payload = RecordServiceUpdateRecordBody(record=record, tokenization=request.return_tokens, byot=request.token_mode.value) records_api = self.__vault_client.get_records_api() try: @@ -123,8 +103,10 @@ def update(self, request: UpdateRequest): api_response = records_api.record_service_update_record( self.__vault_client.get_vault_id(), request.table, - request.data.get("skyflow_id"), - payload + id=request.data.get("skyflow_id"), + record=record, + tokenization=request.return_tokens, + byot=request.token_mode.value ) log_info(SkyflowMessages.Info.UPDATE_SUCCESS.value, self.__vault_client.get_logger()) update_response = parse_update_record_response(api_response) @@ -132,24 +114,19 @@ def update(self, request: UpdateRequest): except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.UPDATE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def delete(self, request: DeleteRequest): log_info(SkyflowMessages.Info.VALIDATING_DELETE_REQUEST.value, self.__vault_client.get_logger()) validate_delete_request(self.__vault_client.get_logger(), request) log_info(SkyflowMessages.Info.DELETE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() - payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) records_api = self.__vault_client.get_records_api() try: log_info(SkyflowMessages.Info.DELETE_TRIGGERED.value, self.__vault_client.get_logger()) api_response = records_api.record_service_bulk_delete_record( self.__vault_client.get_vault_id(), request.table, - payload + skyflow_ids=request.ids ) log_info(SkyflowMessages.Info.DELETE_SUCCESS.value, self.__vault_client.get_logger()) delete_response = parse_delete_response(api_response) @@ -157,12 +134,6 @@ def delete(self, request: DeleteRequest): except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, - logger=self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def get(self, request: GetRequest): log_info(SkyflowMessages.Info.VALIDATE_GET_REQUEST.value, self.__vault_client.get_logger()) @@ -193,24 +164,18 @@ def get(self, request: GetRequest): except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def query(self, request: QueryRequest): log_info(SkyflowMessages.Info.VALIDATING_QUERY_REQUEST.value, self.__vault_client.get_logger()) validate_query_request(self.__vault_client.get_logger(), request) 
log_info(SkyflowMessages.Info.QUERY_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() - payload = QueryServiceExecuteQueryBody(query=request.query) query_api = self.__vault_client.get_query_api() try: log_info(SkyflowMessages.Info.QUERY_TRIGGERED.value, self.__vault_client.get_logger()) api_response = query_api.query_service_execute_query( self.__vault_client.get_vault_id(), - payload + query=request.query ) log_info(SkyflowMessages.Info.QUERY_SUCCESS.value, self.__vault_client.get_logger()) query_response = parse_query_response(api_response) @@ -218,11 +183,6 @@ def query(self, request: QueryRequest): except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def detokenize(self, request: DetokenizeRequest): log_info(SkyflowMessages.Info.VALIDATE_DETOKENIZE_REQUEST.value, self.__vault_client.get_logger()) @@ -230,16 +190,19 @@ def detokenize(self, request: DetokenizeRequest): log_info(SkyflowMessages.Info.DETOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() tokens_list = [ - V1DetokenizeRecordRequest(token=item.get('token'), redaction=item.get('redaction').value) + V1DetokenizeRecordRequest( + token=item.get('token'), + redaction=item.get('redaction', None) + ) for item in request.data ] - payload = V1DetokenizePayload(detokenization_parameters=tokens_list, continue_on_error=request.continue_on_error) tokens_api = self.__vault_client.get_tokens_api() try: log_info(SkyflowMessages.Info.DETOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) api_response = tokens_api.record_service_detokenize( self.__vault_client.get_vault_id(), - detokenize_payload=payload + detokenization_parameters=tokens_list, + continue_on_error = request.continue_on_error ) log_info(SkyflowMessages.Info.DETOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) detokenize_response = parse_detokenize_response(api_response) @@ -247,12 +210,6 @@ def detokenize(self, request: DetokenizeRequest): except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, - logger=self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: - handle_exception(e, self.__vault_client.get_logger()) def tokenize(self, request: TokenizeRequest): log_info(SkyflowMessages.Info.VALIDATING_TOKENIZE_REQUEST.value, self.__vault_client.get_logger()) @@ -264,23 +221,16 @@ def tokenize(self, request: TokenizeRequest): V1TokenizeRecordRequest(value=item["value"], column_group=item["column_group"]) for item in request.values ] - payload = V1TokenizePayload(tokenization_parameters=records_list) tokens_api = self.__vault_client.get_tokens_api() try: log_info(SkyflowMessages.Info.TOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) api_response = tokens_api.record_service_tokenize( self.__vault_client.get_vault_id(), - tokenize_payload=payload + 
tokenization_parameters=records_list ) tokenize_response = parse_tokenize_response(api_response) log_info(SkyflowMessages.Info.TOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) return tokenize_response except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except UnauthorizedException as e: - log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, - logger=self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) - except ForbiddenException as e: handle_exception(e, self.__vault_client.get_logger()) \ No newline at end of file diff --git a/skyflow/vault/tokens/_detokenize_request.py b/skyflow/vault/tokens/_detokenize_request.py index 73a5368e..d6a9ed24 100644 --- a/skyflow/vault/tokens/_detokenize_request.py +++ b/skyflow/vault/tokens/_detokenize_request.py @@ -1,5 +1,3 @@ -from skyflow.utils.enums.redaction_type import RedactionType - class DetokenizeRequest: def __init__(self, data, continue_on_error = False): self.data = data diff --git a/tests/client/test_skyflow.py b/tests/client/test_skyflow.py index 621cdee0..3e3681bb 100644 --- a/tests/client/test_skyflow.py +++ b/tests/client/test_skyflow.py @@ -65,7 +65,7 @@ def test_remove_vault_config_valid(self): self.assertNotIn(VALID_VAULT_CONFIG['vault_id'], self.builder._Builder__vault_configs) - @patch('skyflow.client.skyflow.log_error') + @patch('skyflow.utils.logger.log_error') def test_remove_vault_config_invalid(self, mock_log_error): self.builder.add_vault_config(VALID_VAULT_CONFIG) self.builder.build() @@ -159,7 +159,7 @@ def test_remove_connection_config_valid(self): self.assertNotIn(VALID_CONNECTION_CONFIG.get("connection_id"), self.builder._Builder__connection_configs) - @patch('skyflow.client.skyflow.log_error') + @patch('skyflow.utils.logger.log_error') def test_remove_connection_config_invalid(self, mock_log_error): self.builder.add_connection_config(VALID_CONNECTION_CONFIG) self.builder.build() diff --git a/tests/vault/client/test__client.py b/tests/vault/client/test__client.py index cc2e2d42..565b1e6f 100644 --- a/tests/vault/client/test__client.py +++ b/tests/vault/client/test__client.py @@ -1,6 +1,5 @@ import unittest from unittest.mock import patch, MagicMock -from skyflow.generated.rest import Configuration from skyflow.vault.client.client import VaultClient CONFIG = { @@ -31,10 +30,8 @@ def test_set_logger(self): @patch("skyflow.vault.client.client.get_credentials") @patch("skyflow.vault.client.client.get_vault_url") - @patch("skyflow.vault.client.client.Configuration") @patch("skyflow.vault.client.client.VaultClient.initialize_api_client") - def test_initialize_client_configuration(self, mock_init_api_client, mock_config, mock_get_vault_url, - mock_get_credentials): + def test_initialize_client_configuration(self, mock_init_api_client, mock_get_vault_url, mock_get_credentials): mock_get_credentials.return_value = (CREDENTIALS_WITH_API_KEY) mock_get_vault_url.return_value = "https://test-vault-url.com" @@ -42,32 +39,30 @@ def test_initialize_client_configuration(self, mock_init_api_client, mock_config mock_get_credentials.assert_called_once_with(CONFIG["credentials"], None, logger=None) mock_get_vault_url.assert_called_once_with(CONFIG["cluster_id"], CONFIG["env"], CONFIG["vault_id"], logger=None) - mock_config.assert_called_once_with(host="https://test-vault-url.com", access_token="dummy_api_key") 
mock_init_api_client.assert_called_once() - @patch("skyflow.vault.client.client.ApiClient") + @patch("skyflow.vault.client.client.Skyflow") def test_initialize_api_client(self, mock_api_client): - config = Configuration() - self.vault_client.initialize_api_client(config) - mock_api_client.assert_called_once_with(config) - - @patch("skyflow.vault.client.client.RecordsApi") - def test_get_records_api(self, mock_records_api): - self.vault_client.initialize_api_client(Configuration()) - self.vault_client.get_records_api() - mock_records_api.assert_called_once() - - @patch("skyflow.vault.client.client.TokensApi") - def test_get_tokens_api(self, mock_tokens_api): - self.vault_client.initialize_api_client(Configuration()) - self.vault_client.get_tokens_api() - mock_tokens_api.assert_called_once() - - @patch("skyflow.vault.client.client.QueryApi") - def test_get_query_api(self, mock_query_api): - self.vault_client.initialize_api_client(Configuration()) - self.vault_client.get_query_api() - mock_query_api.assert_called_once() + self.vault_client.initialize_api_client("https://test-vault-url.com", "dummy_token") + mock_api_client.assert_called_once_with(base_url="https://test-vault-url.com", token="dummy_token") + + def test_get_records_api(self): + self.vault_client._VaultClient__api_client = MagicMock() + self.vault_client._VaultClient__api_client.records = MagicMock() + records_api = self.vault_client.get_records_api() + self.assertIsNotNone(records_api) + + def test_get_tokens_api(self): + self.vault_client._VaultClient__api_client = MagicMock() + self.vault_client._VaultClient__api_client.tokens = MagicMock() + tokens_api = self.vault_client.get_tokens_api() + self.assertIsNotNone(tokens_api) + + def test_get_query_api(self): + self.vault_client._VaultClient__api_client = MagicMock() + self.vault_client._VaultClient__api_client.query = MagicMock() + query_api = self.vault_client.get_query_api() + self.assertIsNotNone(query_api) def test_get_vault_id(self): self.assertEqual(self.vault_client.get_vault_id(), CONFIG["vault_id"]) diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 6e0805e0..89046e65 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -1,8 +1,6 @@ import unittest from unittest.mock import Mock, patch -from skyflow.generated.rest import RecordServiceBatchOperationBody, V1BatchRecord, RecordServiceInsertRecordBody, \ - V1FieldRecords, RecordServiceUpdateRecordBody, RecordServiceBulkDeleteRecordBody, QueryServiceExecuteQueryBody, \ - V1DetokenizeRecordRequest, V1DetokenizePayload, V1TokenizePayload, V1TokenizeRecordRequest, RedactionEnumREDACTION +from skyflow.generated.rest import V1BatchRecord, V1FieldRecords, V1DetokenizeRecordRequest, V1TokenizeRecordRequest from skyflow.utils.enums import RedactionType, TokenMode from skyflow.vault.controller import Vault from skyflow.vault.data import InsertRequest, InsertResponse, UpdateResponse, UpdateRequest, DeleteResponse, \ @@ -38,19 +36,15 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) continue_on_error=True ) - expected_body = RecordServiceBatchOperationBody( - records=[ - V1BatchRecord( - fields={"field": "value"}, - table_name=TABLE_NAME, - method="POST", - tokenization=True, - upsert="column_name" - ) - ], - continue_on_error=True, - byot="DISABLE" - ) + expected_body = [ + V1BatchRecord( + fields={"field": "value"}, + table_name=TABLE_NAME, + method="POST", + tokenization=True, + upsert="column_name" + ) + ] 
# Mock API response to contain a mix of successful and failed insertions mock_api_response = Mock() @@ -78,7 +72,12 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_batch_operation.assert_called_once_with(VAULT_ID, expected_body) + records_api.record_service_batch_operation.assert_called_once_with( + VAULT_ID, + records=expected_body, + continue_on_error=True, + byot="DISABLE" + ) mock_parse_response.assert_called_once_with(mock_api_response, True) # Assert that the result matches the expected InsertResponse @@ -102,14 +101,9 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val ) # Expected API request body based on InsertRequest parameters - expected_body = RecordServiceInsertRecordBody( - records=[ - V1FieldRecords(fields={"field": "value"}) - ], - tokenization=True, - upsert=None, - homogeneous=True - ) + expected_body = [ + V1FieldRecords(fields={"field": "value"}) + ] # Mock API response for a successful insert mock_api_response = Mock() @@ -129,14 +123,34 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_insert_record.assert_called_once_with(VAULT_ID, TABLE_NAME, - expected_body) + records_api.record_service_insert_record.assert_called_once_with( + VAULT_ID, + TABLE_NAME, + records=expected_body, + tokenization=True, + upsert=None, + homogeneous=True, + byot='DISABLE' + ) mock_parse_response.assert_called_once_with(mock_api_response, False) # Assert that the result matches the expected InsertResponse self.assertEqual(result.inserted_fields, expected_inserted_fields) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_insert_request") + def test_insert_handles_generic_error(self, mock_validate): + request = InsertRequest(table_name="test_table", values=[{"column_name": "value"}], return_tokens=False, + upsert=False, + homogeneous=False, continue_on_error=False, token_mode=Mock()) + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_insert_record.side_effect = Exception("Generic Exception") + + with self.assertRaises(Exception): + self.vault.insert(request) + + records_api.record_service_insert_record.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_insert_request") @patch("skyflow.vault.controller._vault.parse_insert_response") def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock_parse_response, mock_validate): @@ -154,14 +168,9 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock ) # Expected API request body based on InsertRequest parameters - expected_body = RecordServiceInsertRecordBody( - records=[ - V1FieldRecords(fields={"field": "value"}, tokens={"token_field": "token_val1"}) - ], - tokenization=True, - upsert=None, - homogeneous=True - ) + expected_body = [ + V1FieldRecords(fields={"field": "value"}, tokens={"token_field": "token_val1"}) + ] # Mock API response for a successful insert mock_api_response = Mock() @@ -181,8 +190,15 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - 
records_api.record_service_insert_record.assert_called_once_with(VAULT_ID, TABLE_NAME, - expected_body) + records_api.record_service_insert_record.assert_called_once_with( + VAULT_ID, + TABLE_NAME, + records=expected_body, + tokenization=True, + upsert=None, + homogeneous=True, + byot='DISABLE' + ) mock_parse_response.assert_called_once_with(mock_api_response, False) # Assert that the result matches the expected InsertResponse @@ -204,14 +220,7 @@ def test_update_successful(self, mock_parse_response, mock_validate): ) # Expected payload - expected_payload = RecordServiceUpdateRecordBody( - record=V1FieldRecords( - fields={"field": "new_value"}, - tokens=request.tokens - ), - tokenization=request.return_tokens, - byot=request.token_mode.value - ) + expected_record = V1FieldRecords(fields={"field": "new_value"}, tokens=None) # Mock API response mock_api_response = Mock() @@ -234,9 +243,11 @@ def test_update_successful(self, mock_parse_response, mock_validate): mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) records_api.record_service_update_record.assert_called_once_with( VAULT_ID, - request.table, - request.data["skyflow_id"], - expected_payload + TABLE_NAME, + id="12345", + record=expected_record, + tokenization=True, + byot="DISABLE" ) mock_parse_response.assert_called_once_with(mock_api_response) @@ -244,6 +255,18 @@ def test_update_successful(self, mock_parse_response, mock_validate): self.assertEqual(result.updated_field, expected_updated_field) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_update_request") + def test_update_handles_generic_error(self, mock_validate): + request = UpdateRequest(table="test_table", data={"skyflow_id": "123", "field": "value"}, + return_tokens=False) + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_update_record.side_effect = Exception("Generic Exception") + + with self.assertRaises(Exception): + self.vault.update(request) + + records_api.record_service_update_record.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_delete_request") @patch("skyflow.vault.controller._vault.parse_delete_response") def test_delete_successful(self, mock_parse_response, mock_validate): @@ -256,7 +279,7 @@ def test_delete_successful(self, mock_parse_response, mock_validate): ) # Expected payload - expected_payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) + expected_payload = ["12345", "67890"] # Mock API response mock_api_response = Mock() @@ -278,8 +301,8 @@ def test_delete_successful(self, mock_parse_response, mock_validate): mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) records_api.record_service_bulk_delete_record.assert_called_once_with( VAULT_ID, - request.table, - expected_payload + TABLE_NAME, + skyflow_ids=["12345", "67890"] ) mock_parse_response.assert_called_once_with(mock_api_response) @@ -287,6 +310,17 @@ def test_delete_successful(self, mock_parse_response, mock_validate): self.assertEqual(result.deleted_ids, expected_deleted_ids) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_delete_request") + def test_delete_handles_generic_exception(self, mock_validate): + request = DeleteRequest(table="test_table", ids=["id1", "id2"]) + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_delete_record.side_effect = Exception("Generic Error") + + with 
self.assertRaises(Exception): + self.vault.delete(request) + + records_api.record_service_bulk_delete_record.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_get_request") @patch("skyflow.vault.controller._vault.parse_get_response") def test_get_successful(self, mock_parse_response, mock_validate): @@ -405,6 +439,17 @@ def test_get_successful_with_column_values(self, mock_parse_response, mock_valid self.assertEqual(result.data, expected_data) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_get_request") + def test_get_handles_generic_error(self, mock_validate): + request = GetRequest(table="test_table", ids=["id1", "id2"]) + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_get_record.side_effect = Exception("Generic Exception") + + with self.assertRaises(Exception): + self.vault.get(request) + + records_api.record_service_bulk_get_record.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_query_request") @patch("skyflow.vault.controller._vault.parse_query_response") def test_query_successful(self, mock_parse_response, mock_validate): @@ -413,9 +458,6 @@ def test_query_successful(self, mock_parse_response, mock_validate): # Mock request request = QueryRequest(query="SELECT * FROM test_table") - # Expected payload as a QueryServiceExecuteQueryBody instance - expected_payload = QueryServiceExecuteQueryBody(query=request.query) - # Mock API response mock_api_response = Mock() mock_api_response.records = [ @@ -443,7 +485,7 @@ def test_query_successful(self, mock_parse_response, mock_validate): mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) query_api.query_service_execute_query.assert_called_once_with( VAULT_ID, - expected_payload + query="SELECT * FROM test_table" ) mock_parse_response.assert_called_once_with(mock_api_response) @@ -451,6 +493,17 @@ def test_query_successful(self, mock_parse_response, mock_validate): self.assertEqual(result.fields, expected_fields) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_query_request") + def test_query_handles_generic_error(self, mock_validate): + request = QueryRequest(query="SELECT * from table_name") + query_api = self.vault_client.get_query_api.return_value + query_api.query_service_execute_query.side_effect = Exception("Generic Exception") + + with self.assertRaises(Exception): + self.vault.query(request) + + query_api.query_service_execute_query.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_detokenize_request") @patch("skyflow.vault.controller._vault.parse_detokenize_response") def test_detokenize_successful(self, mock_parse_response, mock_validate): @@ -458,25 +511,20 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): data=[ { 'token': 'token1', - 'redaction': RedactionType.PLAIN_TEXT + 'redaction': 'PLAIN_TEXT' }, { 'token': 'token2', - 'redaction': RedactionType.PLAIN_TEXT + 'redaction': 'PLAIN_TEXT' } ], continue_on_error=False ) - # Expected payload as a V1DetokenizePayload instance - tokens_list = [ - V1DetokenizeRecordRequest(token="token1", redaction=RedactionEnumREDACTION.PLAIN_TEXT), - V1DetokenizeRecordRequest(token="token2", redaction=RedactionEnumREDACTION.PLAIN_TEXT) + expected_tokens_list = [ + V1DetokenizeRecordRequest(token="token1", redaction="PLAIN_TEXT"), + V1DetokenizeRecordRequest(token="token2", redaction="PLAIN_TEXT") ] - 
expected_payload = V1DetokenizePayload( - detokenization_parameters=tokens_list, - continue_on_error=request.continue_on_error - ) # Mock API response mock_api_response = Mock() @@ -504,7 +552,8 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) tokens_api.record_service_detokenize.assert_called_once_with( VAULT_ID, - detokenize_payload=expected_payload + detokenization_parameters=expected_tokens_list, + continue_on_error=False ) mock_parse_response.assert_called_once_with(mock_api_response) @@ -512,6 +561,29 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): self.assertEqual(result.detokenized_fields, expected_fields) self.assertEqual(result.errors, []) # No errors expected + @patch("skyflow.vault.controller._vault.validate_detokenize_request") + def test_detokenize_handles_generic_error(self, mock_validate): + request = DetokenizeRequest( + data=[ + { + 'token': 'token1', + 'redaction': RedactionType.PLAIN_TEXT + }, + { + 'token': 'token2', + 'redaction': RedactionType.PLAIN_TEXT + } + ], + continue_on_error=False + ) + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_detokenize.side_effect = Exception("Generic Error") + + with self.assertRaises(Exception): + self.vault.detokenize(request) + + tokens_api.record_service_detokenize.assert_called_once() + @patch("skyflow.vault.controller._vault.validate_tokenize_request") @patch("skyflow.vault.controller._vault.parse_tokenize_response") def test_tokenize_successful(self, mock_parse_response, mock_validate): @@ -525,12 +597,10 @@ def test_tokenize_successful(self, mock_parse_response, mock_validate): ] ) - # Expected payload as a V1TokenizePayload instance - records_list = [ + expected_records_list = [ V1TokenizeRecordRequest(value="value1", column_group="group1"), V1TokenizeRecordRequest(value="value2", column_group="group2") ] - expected_payload = V1TokenizePayload(tokenization_parameters=records_list) # Mock API response mock_api_response = Mock() @@ -558,9 +628,25 @@ def test_tokenize_successful(self, mock_parse_response, mock_validate): mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) tokens_api.record_service_tokenize.assert_called_once_with( VAULT_ID, - tokenize_payload=expected_payload + tokenization_parameters=expected_records_list ) mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected TokenizeResponse - self.assertEqual(result.tokenized_fields, expected_fields) \ No newline at end of file + self.assertEqual(result.tokenized_fields, expected_fields) + + @patch("skyflow.vault.controller._vault.validate_tokenize_request") + def test_tokenize_handles_generic_error(self, mock_validate): + request = TokenizeRequest( + values=[ + {"value": "value1", "column_group": "group1"}, + {"value": "value2", "column_group": "group2"} + ] + ) + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_tokenize.side_effect = Exception("Generic Error") + + with self.assertRaises(Exception): + self.vault.tokenize(request) + + tokens_api.record_service_tokenize.assert_called_once() From 343b88c47f12a8ad0623a2901e1c8f55566cba00 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Mon, 17 Mar 2025 13:21:19 +0000 Subject: [PATCH 19/60] [AUTOMATED] Private Release 2.0.0b2.dev0+f760bc0 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/setup.py b/setup.py index 10181764..fc1f9e6a 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b1.dev0+3d4ee51' +current_version = '2.0.0b2.dev0+f760bc0' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 522115a4..d73d9196 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b1.dev0+3d4ee51' \ No newline at end of file +SDK_VERSION = '2.0.0b2.dev0+f760bc0' \ No newline at end of file From 5ef3fac398178ce9faaeec0f01f0a895197de96a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Camith-skyflow=E2=80=9D?= <“amit@skyflow.com”> Date: Wed, 16 Apr 2025 13:15:54 +0530 Subject: [PATCH 20/60] SK-2003: update readme --- README.md | 125 ++++++++++++++++++ .../bearer_token_expiry_example.py | 111 ++++++++++++++++ 2 files changed, 236 insertions(+) create mode 100644 samples/service_account/bearer_token_expiry_example.py diff --git a/README.md b/README.md index af1e79f4..de6ce235 100644 --- a/README.md +++ b/README.md @@ -39,6 +39,7 @@ The Skyflow Python SDK is designed to help with integrating Skyflow into a Pytho - [Generate bearer tokens with context](#generate-bearer-tokens-with-context) - [Generate scoped bearer tokens](#generate-scoped-bearer-tokens) - [Generate signed data tokens](#generate-signed-data-tokens) + - [Bearer token expiry edge case](#bearer-token-expiry-edge-case) - [Logging](#logging) - [Reporting a Vulnerability](#reporting-a-vulnerability) @@ -2142,6 +2143,130 @@ Notes: - The `time_to_live` (TTL) value should be specified in seconds. - By default, the TTL value is set to 60 seconds. +#### Bearer token expiry edge case +When you use bearer tokens for authentication and API requests in SDKs, there's the potential for a token to expire after the token is verified as valid but before the actual API call is made, causing the request to fail unexpectedly due to the token's expiration. An error from this edge case would look something like this: + +```txt +message: Authentication failed. Bearer token is expired. Use a valid bearer token. See https://docs.skyflow.com/api-authentication/ +``` + +If you encounter this kind of error, retry the request. During the retry, the SDK detects that the previous bearer token has expired and generates a new one for the current and subsequent requests. + +#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/bearer_token_expiry_example.py): +```python +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +""" + * This example demonstrates how to configure and use the Skyflow SDK + * to detokenize sensitive data stored in a Skyflow vault. + * It includes setting up credentials, configuring the vault, and + * making a detokenization request. The code also implements a retry + * mechanism to handle unauthorized access errors (HTTP 401). 
+""" + + +def detokenize_data(skyflow_client, vault_id): + try: + # Creating a list of tokens to be detokenized + detokenize_data = [ + { + 'token': '', + 'redaction': RedactionType.REDACTED + }, + { + 'token': '', + 'redaction': RedactionType.MASKED + } + ] + + # Building a detokenization request + detokenize_request = DetokenizeRequest( + data=detokenize_data, + continue_on_error=False + ) + + # Sending the detokenization request and receiving the response + response = skyflow_client.vault(vault_id).detokenize(detokenize_request) + + # Printing the detokenized response + print('Detokenization successful:', response) + + except SkyflowError as error: + print("Skyflow error occurred:", error) + raise + + except Exception as error: + print("Unexpected error occurred:", error) + raise + + +def perform_detokenization(): + try: + # Setting up credentials for accessing the Skyflow vault + cred = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Credentials string for authentication + } + + credentials = { + 'token': '' + } + + # Configuring the Skyflow vault with necessary details + primary_vault_config = { + 'vault_id': '', # Vault ID + 'cluster_id': '', # Cluster ID + 'env': Env.PROD, # Environment set to PROD + 'credentials': credentials # Setting credentials + } + + # Creating a Skyflow client instance with the configured vault + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) + .set_log_level(LogLevel.ERROR) # Setting log level to ERROR + .build() + ) + + # Attempting to detokenize data using the Skyflow client + try: + detokenize_data(skyflow_client, primary_vault_config.get('vault_id')) + except SkyflowError as err: + # Retry detokenization if the error is due to unauthorized access (HTTP 401) + if err.http_code == 401: + print("Unauthorized access detected. Retrying...") + detokenize_data(skyflow_client, primary_vault_config.get('vault_id')) + else: + # Rethrow the exception for other error codes + raise err + + except SkyflowError as error: + print('Skyflow Specific Error:', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the function +perform_detokenization() +``` + ## Logging The SDK provides logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below: diff --git a/samples/service_account/bearer_token_expiry_example.py b/samples/service_account/bearer_token_expiry_example.py new file mode 100644 index 00000000..169bf500 --- /dev/null +++ b/samples/service_account/bearer_token_expiry_example.py @@ -0,0 +1,111 @@ +import json +from skyflow.error import SkyflowError +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +""" + * This example demonstrates how to configure and use the Skyflow SDK + * to detokenize sensitive data stored in a Skyflow vault. + * It includes setting up credentials, configuring the vault, and + * making a detokenization request. The code also implements a retry + * mechanism to handle unauthorized access errors (HTTP 401). 
+""" + + +def detokenize_data(skyflow_client, vault_id): + try: + # Creating a list of tokens to be detokenized + detokenize_data = [ + { + 'token': '', + 'redaction': RedactionType.REDACTED + }, + { + 'token': '', + 'redaction': RedactionType.MASKED + } + ] + + # Building a detokenization request + detokenize_request = DetokenizeRequest( + data=detokenize_data, + continue_on_error=False + ) + + # Sending the detokenization request and receiving the response + response = skyflow_client.vault(vault_id).detokenize(detokenize_request) + + # Printing the detokenized response + print('Detokenization successful:', response) + + except SkyflowError as error: + print("Skyflow error occurred:", error) + raise + + except Exception as error: + print("Unexpected error occurred:", error) + raise + + +def perform_detokenization(): + try: + # Setting up credentials for accessing the Skyflow vault + cred = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + + skyflow_credentials = { + 'credentials_string': json.dumps(cred) # Credentials string for authentication + } + + credentials = { + 'token': '' + } + + # Configuring the Skyflow vault with necessary details + primary_vault_config = { + 'vault_id': '', # Vault ID + 'cluster_id': '', # Cluster ID + 'env': Env.PROD, # Environment set to PROD + 'credentials': credentials # Setting credentials + } + + # Creating a Skyflow client instance with the configured vault + skyflow_client = ( + Skyflow.builder() + .add_vault_config(primary_vault_config) + .add_skyflow_credentials(skyflow_credentials) + .set_log_level(LogLevel.ERROR) # Setting log level to ERROR + .build() + ) + + # Attempting to detokenize data using the Skyflow client + try: + detokenize_data(skyflow_client, primary_vault_config.get('vault_id')) + except SkyflowError as err: + # Retry detokenization if the error is due to unauthorized access (HTTP 401) + if err.http_code == 401: + print("Unauthorized access detected. 
Retrying...") + detokenize_data(skyflow_client, primary_vault_config.get('vault_id')) + else: + # Rethrow the exception for other error codes + raise err + + except SkyflowError as error: + print('Skyflow Specific Error:', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) + + +# Invoke the function +perform_detokenization() From 3f3a2971d38246ce4f7696fa21517a02bb003d5a Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Fri, 2 May 2025 19:24:21 +0530 Subject: [PATCH 21/60] SK-1909 Update generated code --- skyflow/generated/rest/__init__.py | 2 + skyflow/generated/rest/audit/__init__.py | 2 + skyflow/generated/rest/audit/client.py | 266 ++- skyflow/generated/rest/audit/raw_client.py | 482 +++++ .../generated/rest/audit/types/__init__.py | 2 + .../generated/rest/authentication/__init__.py | 2 + .../generated/rest/authentication/client.py | 196 +-- .../rest/authentication/raw_client.py | 235 +++ skyflow/generated/rest/bin_lookup/__init__.py | 2 + skyflow/generated/rest/bin_lookup/client.py | 151 +- .../generated/rest/bin_lookup/raw_client.py | 177 ++ skyflow/generated/rest/client.py | 46 +- skyflow/generated/rest/core/__init__.py | 5 + skyflow/generated/rest/core/api_error.py | 18 +- skyflow/generated/rest/core/client_wrapper.py | 8 +- skyflow/generated/rest/core/http_client.py | 2 - skyflow/generated/rest/core/http_response.py | 55 + .../generated/rest/core/jsonable_encoder.py | 1 - .../generated/rest/core/pydantic_utilities.py | 179 +- skyflow/generated/rest/core/serialization.py | 10 +- skyflow/generated/rest/errors/__init__.py | 2 + .../rest/errors/bad_request_error.py | 3 +- .../generated/rest/errors/not_found_error.py | 3 +- .../rest/errors/unauthorized_error.py | 3 +- skyflow/generated/rest/query/__init__.py | 2 + skyflow/generated/rest/query/client.py | 137 +- skyflow/generated/rest/query/raw_client.py | 152 ++ skyflow/generated/rest/records/__init__.py | 2 + skyflow/generated/rest/records/client.py | 1283 +++----------- skyflow/generated/rest/records/raw_client.py | 1545 +++++++++++++++++ .../generated/rest/records/types/__init__.py | 2 + skyflow/generated/rest/tokens/__init__.py | 2 + skyflow/generated/rest/tokens/client.py | 287 +-- skyflow/generated/rest/tokens/raw_client.py | 318 ++++ skyflow/generated/rest/types/__init__.py | 2 + .../rest/types/audit_event_context.py | 10 +- .../generated/rest/types/audit_event_data.py | 4 +- .../rest/types/audit_event_http_info.py | 8 +- .../generated/rest/types/googlerpc_status.py | 6 +- skyflow/generated/rest/types/protobuf_any.py | 8 +- .../rest/types/v_1_audit_after_options.py | 4 +- .../rest/types/v_1_audit_event_response.py | 4 +- .../rest/types/v_1_audit_response.py | 8 +- .../rest/types/v_1_audit_response_event.py | 12 +- .../types/v_1_audit_response_event_request.py | 10 +- .../types/v_1_batch_operation_response.py | 8 +- .../generated/rest/types/v_1_batch_record.py | 4 +- .../rest/types/v_1_bin_list_response.py | 6 +- .../types/v_1_bulk_delete_record_response.py | 8 +- .../types/v_1_bulk_get_record_response.py | 6 +- skyflow/generated/rest/types/v_1_card.py | 8 +- .../rest/types/v_1_delete_file_response.py | 4 +- .../rest/types/v_1_delete_record_response.py | 4 +- .../types/v_1_detokenize_record_request.py | 4 +- .../types/v_1_detokenize_record_response.py | 6 +- .../rest/types/v_1_detokenize_response.py | 6 +- .../generated/rest/types/v_1_field_records.py | 4 +- .../rest/types/v_1_get_auth_token_response.py | 8 +- 
.../v_1_get_file_scan_status_response.py | 6 +- .../rest/types/v_1_get_query_response.py | 6 +- .../rest/types/v_1_insert_record_response.py | 6 +- .../rest/types/v_1_record_meta_properties.py | 4 +- .../rest/types/v_1_tokenize_record_request.py | 4 +- .../types/v_1_tokenize_record_response.py | 4 +- .../rest/types/v_1_tokenize_response.py | 6 +- .../rest/types/v_1_update_record_response.py | 4 +- .../rest/types/v_1_vault_field_mapping.py | 4 +- .../rest/types/v_1_vault_schema_config.py | 4 +- skyflow/generated/rest/version.py | 2 +- 69 files changed, 3749 insertions(+), 2035 deletions(-) create mode 100644 skyflow/generated/rest/audit/raw_client.py create mode 100644 skyflow/generated/rest/authentication/raw_client.py create mode 100644 skyflow/generated/rest/bin_lookup/raw_client.py create mode 100644 skyflow/generated/rest/core/http_response.py create mode 100644 skyflow/generated/rest/query/raw_client.py create mode 100644 skyflow/generated/rest/records/raw_client.py create mode 100644 skyflow/generated/rest/tokens/raw_client.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index 5cacae7e..af42a8fa 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( AuditEventAuditResourceType, AuditEventContext, diff --git a/skyflow/generated/rest/audit/__init__.py b/skyflow/generated/rest/audit/__init__.py index 38fe28d3..e3b20ff0 100644 --- a/skyflow/generated/rest/audit/__init__.py +++ b/skyflow/generated/rest/audit/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( AuditServiceListAuditEventsRequestFilterOpsActionType, AuditServiceListAuditEventsRequestFilterOpsContextAccessType, diff --git a/skyflow/generated/rest/audit/client.py b/skyflow/generated/rest/audit/client.py index 3b4d329a..7e22b077 100644 --- a/skyflow/generated/rest/audit/client.py +++ b/skyflow/generated/rest/audit/client.py @@ -1,37 +1,45 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.client_wrapper import SyncClientWrapper import typing -from .types.audit_service_list_audit_events_request_filter_ops_context_actor_type import ( - AuditServiceListAuditEventsRequestFilterOpsContextActorType, + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.v_1_audit_response import V1AuditResponse +from .raw_client import AsyncRawAuditClient, RawAuditClient +from .types.audit_service_list_audit_events_request_filter_ops_action_type import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, ) from .types.audit_service_list_audit_events_request_filter_ops_context_access_type import ( AuditServiceListAuditEventsRequestFilterOpsContextAccessType, ) +from .types.audit_service_list_audit_events_request_filter_ops_context_actor_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextActorType, +) from .types.audit_service_list_audit_events_request_filter_ops_context_auth_mode import ( AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, ) -from .types.audit_service_list_audit_events_request_filter_ops_action_type import ( - AuditServiceListAuditEventsRequestFilterOpsActionType, -) from .types.audit_service_list_audit_events_request_filter_ops_resource_type import ( AuditServiceListAuditEventsRequestFilterOpsResourceType, ) from .types.audit_service_list_audit_events_request_sort_ops_order_by import ( AuditServiceListAuditEventsRequestSortOpsOrderBy, ) -from ..core.request_options import RequestOptions -from ..types.v_1_audit_response import V1AuditResponse -from ..core.pydantic_utilities import parse_obj_as -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper class AuditClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAuditClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAuditClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawAuditClient + """ + return self._raw_client def audit_service_list_audit_events( self, @@ -192,82 +200,62 @@ def audit_service_list_audit_events( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.audit.audit_service_list_audit_events( - filter_ops_account_id="filterOps.accountID", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.audit.audit_service_list_audit_events(filter_ops_account_id='filterOps.accountID', ) """ - _response = self._client_wrapper.httpx_client.request( - "v1/audit/events", - method="GET", - params={ - "filterOps.context.changeID": filter_ops_context_change_id, - "filterOps.context.requestID": filter_ops_context_request_id, - "filterOps.context.traceID": filter_ops_context_trace_id, - "filterOps.context.sessionID": filter_ops_context_session_id, - "filterOps.context.actor": filter_ops_context_actor, - "filterOps.context.actorType": filter_ops_context_actor_type, - "filterOps.context.accessType": filter_ops_context_access_type, - "filterOps.context.ipAddress": filter_ops_context_ip_address, - "filterOps.context.origin": filter_ops_context_origin, - "filterOps.context.authMode": filter_ops_context_auth_mode, - "filterOps.context.jwtID": filter_ops_context_jwt_id, - "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, - "filterOps.parentAccountID": filter_ops_parent_account_id, - "filterOps.accountID": filter_ops_account_id, - "filterOps.workspaceID": filter_ops_workspace_id, - "filterOps.vaultID": filter_ops_vault_id, - "filterOps.resourceIDs": filter_ops_resource_i_ds, - "filterOps.actionType": filter_ops_action_type, - "filterOps.resourceType": filter_ops_resource_type, - "filterOps.tags": filter_ops_tags, - "filterOps.responseCode": filter_ops_response_code, - "filterOps.startTime": filter_ops_start_time, - "filterOps.endTime": filter_ops_end_time, - "filterOps.apiName": filter_ops_api_name, - "filterOps.responseMessage": filter_ops_response_message, - "filterOps.httpMethod": filter_ops_http_method, - "filterOps.httpURI": filter_ops_http_uri, - "sortOps.sortBy": sort_ops_sort_by, - "sortOps.orderBy": sort_ops_order_by, - "afterOps.timestamp": after_ops_timestamp, - "afterOps.changeID": after_ops_change_id, - "limit": limit, - "offset": offset, - }, + _response = self._raw_client.audit_service_list_audit_events( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_i_ds=filter_ops_resource_i_ds, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + 
filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1AuditResponse, - parse_obj_as( - type_=V1AuditResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncAuditClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAuditClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAuditClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAuditClient + """ + return self._raw_client async def audit_service_list_audit_events( self, @@ -427,83 +415,47 @@ async def audit_service_list_audit_events( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.audit.audit_service_list_audit_events( - filter_ops_account_id="filterOps.accountID", - ) - - + await client.audit.audit_service_list_audit_events(filter_ops_account_id='filterOps.accountID', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "v1/audit/events", - method="GET", - params={ - "filterOps.context.changeID": filter_ops_context_change_id, - "filterOps.context.requestID": filter_ops_context_request_id, - "filterOps.context.traceID": filter_ops_context_trace_id, - "filterOps.context.sessionID": filter_ops_context_session_id, - "filterOps.context.actor": filter_ops_context_actor, - "filterOps.context.actorType": filter_ops_context_actor_type, - "filterOps.context.accessType": filter_ops_context_access_type, - "filterOps.context.ipAddress": filter_ops_context_ip_address, - "filterOps.context.origin": filter_ops_context_origin, - "filterOps.context.authMode": filter_ops_context_auth_mode, - "filterOps.context.jwtID": filter_ops_context_jwt_id, - "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, - "filterOps.parentAccountID": filter_ops_parent_account_id, - "filterOps.accountID": filter_ops_account_id, - "filterOps.workspaceID": filter_ops_workspace_id, - "filterOps.vaultID": filter_ops_vault_id, - "filterOps.resourceIDs": filter_ops_resource_i_ds, - "filterOps.actionType": filter_ops_action_type, - "filterOps.resourceType": filter_ops_resource_type, - "filterOps.tags": filter_ops_tags, - "filterOps.responseCode": filter_ops_response_code, - 
"filterOps.startTime": filter_ops_start_time, - "filterOps.endTime": filter_ops_end_time, - "filterOps.apiName": filter_ops_api_name, - "filterOps.responseMessage": filter_ops_response_message, - "filterOps.httpMethod": filter_ops_http_method, - "filterOps.httpURI": filter_ops_http_uri, - "sortOps.sortBy": sort_ops_sort_by, - "sortOps.orderBy": sort_ops_order_by, - "afterOps.timestamp": after_ops_timestamp, - "afterOps.changeID": after_ops_change_id, - "limit": limit, - "offset": offset, - }, + _response = await self._raw_client.audit_service_list_audit_events( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_i_ds=filter_ops_resource_i_ds, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1AuditResponse, - parse_obj_as( - type_=V1AuditResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/audit/raw_client.py b/skyflow/generated/rest/audit/raw_client.py new file mode 100644 index 00000000..9762e46d --- /dev/null +++ b/skyflow/generated/rest/audit/raw_client.py @@ -0,0 +1,482 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.not_found_error import NotFoundError +from ..types.v_1_audit_response import V1AuditResponse +from .types.audit_service_list_audit_events_request_filter_ops_action_type import ( + AuditServiceListAuditEventsRequestFilterOpsActionType, +) +from .types.audit_service_list_audit_events_request_filter_ops_context_access_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextAccessType, +) +from .types.audit_service_list_audit_events_request_filter_ops_context_actor_type import ( + AuditServiceListAuditEventsRequestFilterOpsContextActorType, +) +from .types.audit_service_list_audit_events_request_filter_ops_context_auth_mode import ( + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode, +) +from .types.audit_service_list_audit_events_request_filter_ops_resource_type import ( + AuditServiceListAuditEventsRequestFilterOpsResourceType, +) +from .types.audit_service_list_audit_events_request_sort_ops_order_by import ( + AuditServiceListAuditEventsRequestSortOpsOrderBy, +) + + +class RawAuditClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def audit_service_list_audit_events( + self, + *, + filter_ops_account_id: str, + filter_ops_context_change_id: typing.Optional[str] = None, + filter_ops_context_request_id: typing.Optional[str] = None, + filter_ops_context_trace_id: typing.Optional[str] = None, + filter_ops_context_session_id: typing.Optional[str] = None, + filter_ops_context_actor: typing.Optional[str] = None, + filter_ops_context_actor_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextActorType + ] = None, + filter_ops_context_access_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAccessType + ] = None, + filter_ops_context_ip_address: typing.Optional[str] = None, + filter_ops_context_origin: typing.Optional[str] = None, + filter_ops_context_auth_mode: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode + ] = None, + filter_ops_context_jwt_id: typing.Optional[str] = None, + filter_ops_context_bearer_token_context_id: typing.Optional[str] = None, + filter_ops_parent_account_id: typing.Optional[str] = None, + filter_ops_workspace_id: typing.Optional[str] = None, + filter_ops_vault_id: typing.Optional[str] = None, + filter_ops_resource_i_ds: typing.Optional[str] = None, + filter_ops_action_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] = None, + filter_ops_resource_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] = None, + filter_ops_tags: typing.Optional[str] = None, + filter_ops_response_code: typing.Optional[int] = None, + filter_ops_start_time: typing.Optional[str] = None, + filter_ops_end_time: typing.Optional[str] = None, + filter_ops_api_name: typing.Optional[str] = None, + filter_ops_response_message: typing.Optional[str] = None, + filter_ops_http_method: typing.Optional[str] = None, + filter_ops_http_uri: typing.Optional[str] = None, + sort_ops_sort_by: typing.Optional[str] = None, + sort_ops_order_by: typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] = None, + after_ops_timestamp: typing.Optional[str] = 
None, + after_ops_change_id: typing.Optional[str] = None, + limit: typing.Optional[int] = None, + offset: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1AuditResponse]: + """ + Lists audit events that match query parameters. + + Parameters + ---------- + filter_ops_account_id : str + Resources with the specified account ID. + + filter_ops_context_change_id : typing.Optional[str] + ID for the audit event. + + filter_ops_context_request_id : typing.Optional[str] + ID for the request that caused the event. + + filter_ops_context_trace_id : typing.Optional[str] + ID for the request set by the service that received the request. + + filter_ops_context_session_id : typing.Optional[str] + ID for the session in which the request was sent. + + filter_ops_context_actor : typing.Optional[str] + Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + + filter_ops_context_actor_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextActorType] + Type of member who sent the request. + + filter_ops_context_access_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAccessType] + Type of access for the request. + + filter_ops_context_ip_address : typing.Optional[str] + IP Address of the client that made the request. + + filter_ops_context_origin : typing.Optional[str] + HTTP Origin request header (including scheme, hostname, and port) of the request. + + filter_ops_context_auth_mode : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAuthMode] + Authentication mode the `actor` used. + + filter_ops_context_jwt_id : typing.Optional[str] + ID of the JWT token. + + filter_ops_context_bearer_token_context_id : typing.Optional[str] + Embedded User Context. + + filter_ops_parent_account_id : typing.Optional[str] + Resources with the specified parent account ID. + + filter_ops_workspace_id : typing.Optional[str] + Resources with the specified workspace ID. + + filter_ops_vault_id : typing.Optional[str] + Resources with the specified vault ID. + + filter_ops_resource_i_ds : typing.Optional[str] + Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of "\/\". For example, "VAULT/12345, USER/67890". + + filter_ops_action_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] + Events with the specified action type. + + filter_ops_resource_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] + Resources with the specified type. + + filter_ops_tags : typing.Optional[str] + Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, "login, get". + + filter_ops_response_code : typing.Optional[int] + HTTP response code of the request. + + filter_ops_start_time : typing.Optional[str] + Start timestamp for the query, in SQL format. + + filter_ops_end_time : typing.Optional[str] + End timestamp for the query, in SQL format. + + filter_ops_api_name : typing.Optional[str] + Name of the API called in the request. + + filter_ops_response_message : typing.Optional[str] + Response message of the request. + + filter_ops_http_method : typing.Optional[str] + HTTP method of the request. + + filter_ops_http_uri : typing.Optional[str] + HTTP URI of the request. + + sort_ops_sort_by : typing.Optional[str] + Fully-qualified field by which to sort results. 
Field names should be in camel case (for example, "capitalization.camelCase"). + + sort_ops_order_by : typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] + Ascending or descending ordering of results. + + after_ops_timestamp : typing.Optional[str] + Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + after_ops_change_id : typing.Optional[str] + Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + limit : typing.Optional[int] + Number of results to return. + + offset : typing.Optional[int] + Record position at which to start returning results. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1AuditResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/audit/events", + method="GET", + params={ + "filterOps.context.changeID": filter_ops_context_change_id, + "filterOps.context.requestID": filter_ops_context_request_id, + "filterOps.context.traceID": filter_ops_context_trace_id, + "filterOps.context.sessionID": filter_ops_context_session_id, + "filterOps.context.actor": filter_ops_context_actor, + "filterOps.context.actorType": filter_ops_context_actor_type, + "filterOps.context.accessType": filter_ops_context_access_type, + "filterOps.context.ipAddress": filter_ops_context_ip_address, + "filterOps.context.origin": filter_ops_context_origin, + "filterOps.context.authMode": filter_ops_context_auth_mode, + "filterOps.context.jwtID": filter_ops_context_jwt_id, + "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, + "filterOps.parentAccountID": filter_ops_parent_account_id, + "filterOps.accountID": filter_ops_account_id, + "filterOps.workspaceID": filter_ops_workspace_id, + "filterOps.vaultID": filter_ops_vault_id, + "filterOps.resourceIDs": filter_ops_resource_i_ds, + "filterOps.actionType": filter_ops_action_type, + "filterOps.resourceType": filter_ops_resource_type, + "filterOps.tags": filter_ops_tags, + "filterOps.responseCode": filter_ops_response_code, + "filterOps.startTime": filter_ops_start_time, + "filterOps.endTime": filter_ops_end_time, + "filterOps.apiName": filter_ops_api_name, + "filterOps.responseMessage": filter_ops_response_message, + "filterOps.httpMethod": filter_ops_http_method, + "filterOps.httpURI": filter_ops_http_uri, + "sortOps.sortBy": sort_ops_sort_by, + "sortOps.orderBy": sort_ops_order_by, + "afterOps.timestamp": after_ops_timestamp, + "afterOps.changeID": after_ops_change_id, + "limit": limit, + "offset": offset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1AuditResponse, + parse_obj_as( + type_=V1AuditResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), 
status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawAuditClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def audit_service_list_audit_events( + self, + *, + filter_ops_account_id: str, + filter_ops_context_change_id: typing.Optional[str] = None, + filter_ops_context_request_id: typing.Optional[str] = None, + filter_ops_context_trace_id: typing.Optional[str] = None, + filter_ops_context_session_id: typing.Optional[str] = None, + filter_ops_context_actor: typing.Optional[str] = None, + filter_ops_context_actor_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextActorType + ] = None, + filter_ops_context_access_type: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAccessType + ] = None, + filter_ops_context_ip_address: typing.Optional[str] = None, + filter_ops_context_origin: typing.Optional[str] = None, + filter_ops_context_auth_mode: typing.Optional[ + AuditServiceListAuditEventsRequestFilterOpsContextAuthMode + ] = None, + filter_ops_context_jwt_id: typing.Optional[str] = None, + filter_ops_context_bearer_token_context_id: typing.Optional[str] = None, + filter_ops_parent_account_id: typing.Optional[str] = None, + filter_ops_workspace_id: typing.Optional[str] = None, + filter_ops_vault_id: typing.Optional[str] = None, + filter_ops_resource_i_ds: typing.Optional[str] = None, + filter_ops_action_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] = None, + filter_ops_resource_type: typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] = None, + filter_ops_tags: typing.Optional[str] = None, + filter_ops_response_code: typing.Optional[int] = None, + filter_ops_start_time: typing.Optional[str] = None, + filter_ops_end_time: typing.Optional[str] = None, + filter_ops_api_name: typing.Optional[str] = None, + filter_ops_response_message: typing.Optional[str] = None, + filter_ops_http_method: typing.Optional[str] = None, + filter_ops_http_uri: typing.Optional[str] = None, + sort_ops_sort_by: typing.Optional[str] = None, + sort_ops_order_by: typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] = None, + after_ops_timestamp: typing.Optional[str] = None, + after_ops_change_id: typing.Optional[str] = None, + limit: typing.Optional[int] = None, + offset: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1AuditResponse]: + """ + Lists audit events that match query parameters. + + Parameters + ---------- + filter_ops_account_id : str + Resources with the specified account ID. + + filter_ops_context_change_id : typing.Optional[str] + ID for the audit event. + + filter_ops_context_request_id : typing.Optional[str] + ID for the request that caused the event. + + filter_ops_context_trace_id : typing.Optional[str] + ID for the request set by the service that received the request. + + filter_ops_context_session_id : typing.Optional[str] + ID for the session in which the request was sent. + + filter_ops_context_actor : typing.Optional[str] + Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + + filter_ops_context_actor_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextActorType] + Type of member who sent the request. 
+ + filter_ops_context_access_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAccessType] + Type of access for the request. + + filter_ops_context_ip_address : typing.Optional[str] + IP Address of the client that made the request. + + filter_ops_context_origin : typing.Optional[str] + HTTP Origin request header (including scheme, hostname, and port) of the request. + + filter_ops_context_auth_mode : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsContextAuthMode] + Authentication mode the `actor` used. + + filter_ops_context_jwt_id : typing.Optional[str] + ID of the JWT token. + + filter_ops_context_bearer_token_context_id : typing.Optional[str] + Embedded User Context. + + filter_ops_parent_account_id : typing.Optional[str] + Resources with the specified parent account ID. + + filter_ops_workspace_id : typing.Optional[str] + Resources with the specified workspace ID. + + filter_ops_vault_id : typing.Optional[str] + Resources with the specified vault ID. + + filter_ops_resource_i_ds : typing.Optional[str] + Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of "\/\". For example, "VAULT/12345, USER/67890". + + filter_ops_action_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsActionType] + Events with the specified action type. + + filter_ops_resource_type : typing.Optional[AuditServiceListAuditEventsRequestFilterOpsResourceType] + Resources with the specified type. + + filter_ops_tags : typing.Optional[str] + Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, "login, get". + + filter_ops_response_code : typing.Optional[int] + HTTP response code of the request. + + filter_ops_start_time : typing.Optional[str] + Start timestamp for the query, in SQL format. + + filter_ops_end_time : typing.Optional[str] + End timestamp for the query, in SQL format. + + filter_ops_api_name : typing.Optional[str] + Name of the API called in the request. + + filter_ops_response_message : typing.Optional[str] + Response message of the request. + + filter_ops_http_method : typing.Optional[str] + HTTP method of the request. + + filter_ops_http_uri : typing.Optional[str] + HTTP URI of the request. + + sort_ops_sort_by : typing.Optional[str] + Fully-qualified field by which to sort results. Field names should be in camel case (for example, "capitalization.camelCase"). + + sort_ops_order_by : typing.Optional[AuditServiceListAuditEventsRequestSortOpsOrderBy] + Ascending or descending ordering of results. + + after_ops_timestamp : typing.Optional[str] + Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + after_ops_change_id : typing.Optional[str] + Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + + limit : typing.Optional[int] + Number of results to return. + + offset : typing.Optional[int] + Record position at which to start returning results. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1AuditResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "v1/audit/events", + method="GET", + params={ + "filterOps.context.changeID": filter_ops_context_change_id, + "filterOps.context.requestID": filter_ops_context_request_id, + "filterOps.context.traceID": filter_ops_context_trace_id, + "filterOps.context.sessionID": filter_ops_context_session_id, + "filterOps.context.actor": filter_ops_context_actor, + "filterOps.context.actorType": filter_ops_context_actor_type, + "filterOps.context.accessType": filter_ops_context_access_type, + "filterOps.context.ipAddress": filter_ops_context_ip_address, + "filterOps.context.origin": filter_ops_context_origin, + "filterOps.context.authMode": filter_ops_context_auth_mode, + "filterOps.context.jwtID": filter_ops_context_jwt_id, + "filterOps.context.bearerTokenContextID": filter_ops_context_bearer_token_context_id, + "filterOps.parentAccountID": filter_ops_parent_account_id, + "filterOps.accountID": filter_ops_account_id, + "filterOps.workspaceID": filter_ops_workspace_id, + "filterOps.vaultID": filter_ops_vault_id, + "filterOps.resourceIDs": filter_ops_resource_i_ds, + "filterOps.actionType": filter_ops_action_type, + "filterOps.resourceType": filter_ops_resource_type, + "filterOps.tags": filter_ops_tags, + "filterOps.responseCode": filter_ops_response_code, + "filterOps.startTime": filter_ops_start_time, + "filterOps.endTime": filter_ops_end_time, + "filterOps.apiName": filter_ops_api_name, + "filterOps.responseMessage": filter_ops_response_message, + "filterOps.httpMethod": filter_ops_http_method, + "filterOps.httpURI": filter_ops_http_uri, + "sortOps.sortBy": sort_ops_sort_by, + "sortOps.orderBy": sort_ops_order_by, + "afterOps.timestamp": after_ops_timestamp, + "afterOps.changeID": after_ops_change_id, + "limit": limit, + "offset": offset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1AuditResponse, + parse_obj_as( + type_=V1AuditResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/audit/types/__init__.py b/skyflow/generated/rest/audit/types/__init__.py index 39f38866..71e29b43 100644 --- a/skyflow/generated/rest/audit/types/__init__.py +++ b/skyflow/generated/rest/audit/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .audit_service_list_audit_events_request_filter_ops_action_type import ( AuditServiceListAuditEventsRequestFilterOpsActionType, ) diff --git a/skyflow/generated/rest/authentication/__init__.py b/skyflow/generated/rest/authentication/__init__.py index f3ea2659..5cde0202 100644 --- a/skyflow/generated/rest/authentication/__init__.py +++ b/skyflow/generated/rest/authentication/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/skyflow/generated/rest/authentication/client.py b/skyflow/generated/rest/authentication/client.py index c4825e27..81408a26 100644 --- a/skyflow/generated/rest/authentication/client.py +++ b/skyflow/generated/rest/authentication/client.py @@ -1,16 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.v_1_get_auth_token_response import V1GetAuthTokenResponse -from ..core.pydantic_utilities import parse_obj_as -from ..errors.bad_request_error import BadRequestError -from ..errors.unauthorized_error import UnauthorizedError -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawAuthenticationClient, RawAuthenticationClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +13,18 @@ class AuthenticationClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAuthenticationClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAuthenticationClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawAuthenticationClient + """ + return self._raw_client def authentication_service_get_auth_token( self, @@ -65,80 +71,35 @@ def authentication_service_get_auth_token( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.authentication.authentication_service_get_auth_token( - grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.authentication.authentication_service_get_auth_token(grant_type='urn:ietf:params:oauth:grant-type:jwt-bearer', assertion='eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3', ) """ - _response = self._client_wrapper.httpx_client.request( - "v1/auth/sa/oauth/token", - method="POST", - json={ - "grant_type": grant_type, - "assertion": assertion, - "subject_token": subject_token, - "subject_token_type": subject_token_type, - "requested_token_use": requested_token_use, - "scope": scope, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.authentication_service_get_auth_token( + grant_type=grant_type, + assertion=assertion, + subject_token=subject_token, + subject_token_type=subject_token_type, + requested_token_use=requested_token_use, + scope=scope, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetAuthTokenResponse, - parse_obj_as( - type_=V1GetAuthTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Dict[str, 
typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncAuthenticationClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAuthenticationClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAuthenticationClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAuthenticationClient + """ + return self._raw_client async def authentication_service_get_auth_token( self, @@ -184,81 +145,20 @@ async def authentication_service_get_auth_token( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.authentication.authentication_service_get_auth_token( - grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", - ) - - + await client.authentication.authentication_service_get_auth_token(grant_type='urn:ietf:params:oauth:grant-type:jwt-bearer', assertion='eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "v1/auth/sa/oauth/token", - method="POST", - json={ - "grant_type": grant_type, - "assertion": assertion, - "subject_token": subject_token, - "subject_token_type": subject_token_type, - "requested_token_use": requested_token_use, - "scope": scope, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.authentication_service_get_auth_token( + grant_type=grant_type, + assertion=assertion, + subject_token=subject_token, + subject_token_type=subject_token_type, + requested_token_use=requested_token_use, + scope=scope, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetAuthTokenResponse, - parse_obj_as( - type_=V1GetAuthTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - 
object_=_response.json(), - ), - ) - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/authentication/raw_client.py b/skyflow/generated/rest/authentication/raw_client.py new file mode 100644 index 00000000..0c2778c2 --- /dev/null +++ b/skyflow/generated/rest/authentication/raw_client.py @@ -0,0 +1,235 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.not_found_error import NotFoundError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.v_1_get_auth_token_response import V1GetAuthTokenResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAuthenticationClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def authentication_service_get_auth_token( + self, + *, + grant_type: str, + assertion: str, + subject_token: typing.Optional[str] = OMIT, + subject_token_type: typing.Optional[str] = OMIT, + requested_token_use: typing.Optional[str] = OMIT, + scope: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1GetAuthTokenResponse]: + """ +

+        Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.
+
+        Note: For recommended ways to authenticate, see API authentication.

+ + Parameters + ---------- + grant_type : str + Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`. + + assertion : str + User-signed JWT token that contains the following fields:
+              • iss: Issuer of the JWT.
+              • key: Unique identifier for the key.
+              • aud: Recipient the JWT is intended for.
+              • exp: Time the JWT expires.
+              • sub: Subject of the JWT.
+              • ctx: (Optional) Value for Context-aware authorization.
+ + subject_token : typing.Optional[str] + Subject token. + + subject_token_type : typing.Optional[str] + Subject token type. + + requested_token_use : typing.Optional[str] + Token use type. Either `delegation` or `impersonation`. + + scope : typing.Optional[str] + Subset of available roles to associate with the requested token. Uses the format "role:\ role:\". + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1GetAuthTokenResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/auth/sa/oauth/token", + method="POST", + json={ + "grant_type": grant_type, + "assertion": assertion, + "subject_token": subject_token, + "subject_token_type": subject_token_type, + "requested_token_use": requested_token_use, + "scope": scope, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetAuthTokenResponse, + parse_obj_as( + type_=V1GetAuthTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawAuthenticationClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def authentication_service_get_auth_token( + self, + *, + grant_type: str, + assertion: str, + subject_token: typing.Optional[str] = OMIT, + subject_token_type: typing.Optional[str] = OMIT, + requested_token_use: typing.Optional[str] = OMIT, + scope: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1GetAuthTokenResponse]: + """ +

+        Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.
+
+        Note: For recommended ways to authenticate, see API authentication.

+ + Parameters + ---------- + grant_type : str + Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`. + + assertion : str + User-signed JWT token that contains the following fields:
+              • iss: Issuer of the JWT.
+              • key: Unique identifier for the key.
+              • aud: Recipient the JWT is intended for.
+              • exp: Time the JWT expires.
+              • sub: Subject of the JWT.
+              • ctx: (Optional) Value for Context-aware authorization.
+ + subject_token : typing.Optional[str] + Subject token. + + subject_token_type : typing.Optional[str] + Subject token type. + + requested_token_use : typing.Optional[str] + Token use type. Either `delegation` or `impersonation`. + + scope : typing.Optional[str] + Subset of available roles to associate with the requested token. Uses the format "role:\ role:\". + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1GetAuthTokenResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/auth/sa/oauth/token", + method="POST", + json={ + "grant_type": grant_type, + "assertion": assertion, + "subject_token": subject_token, + "subject_token_type": subject_token_type, + "requested_token_use": requested_token_use, + "scope": scope, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetAuthTokenResponse, + parse_obj_as( + type_=V1GetAuthTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/bin_lookup/__init__.py b/skyflow/generated/rest/bin_lookup/__init__.py index f3ea2659..5cde0202 100644 --- a/skyflow/generated/rest/bin_lookup/__init__.py +++ b/skyflow/generated/rest/bin_lookup/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/skyflow/generated/rest/bin_lookup/client.py b/skyflow/generated/rest/bin_lookup/client.py index 58d30c51..a217ae60 100644 --- a/skyflow/generated/rest/bin_lookup/client.py +++ b/skyflow/generated/rest/bin_lookup/client.py @@ -1,16 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper -from ..types.v_1_vault_schema_config import V1VaultSchemaConfig + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.v_1_bin_list_response import V1BinListResponse -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.pydantic_utilities import parse_obj_as -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from ..types.v_1_vault_schema_config import V1VaultSchemaConfig +from .raw_client import AsyncRawBinLookupClient, RawBinLookupClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +14,18 @@ class BinLookupClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawBinLookupClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawBinLookupClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawBinLookupClient + """ + return self._raw_client def bin_list_service_list_cards_of_bin( self, @@ -56,59 +63,33 @@ def bin_list_service_list_cards_of_bin( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.bin_lookup.bin_list_service_list_cards_of_bin( - bin="012345", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.bin_lookup.bin_list_service_list_cards_of_bin(bin='012345', ) """ - _response = self._client_wrapper.httpx_client.request( - "v1/card_lookup", - method="POST", - json={ - "fields": fields, - "BIN": bin, - "vault_schema_config": convert_and_respect_annotation_metadata( - object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" - ), - "skyflow_id": skyflow_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.bin_list_service_list_cards_of_bin( + fields=fields, + bin=bin, + vault_schema_config=vault_schema_config, + skyflow_id=skyflow_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BinListResponse, - parse_obj_as( - type_=V1BinListResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncBinLookupClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawBinLookupClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawBinLookupClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawBinLookupClient + """ + return self._raw_client async def bin_list_service_list_cards_of_bin( self, @@ -145,60 +126,18 @@ async def bin_list_service_list_cards_of_bin( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.bin_lookup.bin_list_service_list_cards_of_bin( - bin="012345", - ) - - + await client.bin_lookup.bin_list_service_list_cards_of_bin(bin='012345', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "v1/card_lookup", - method="POST", - json={ - "fields": fields, - "BIN": bin, - "vault_schema_config": convert_and_respect_annotation_metadata( - object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" - ), - "skyflow_id": skyflow_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.bin_list_service_list_cards_of_bin( + fields=fields, + bin=bin, + vault_schema_config=vault_schema_config, + skyflow_id=skyflow_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BinListResponse, - parse_obj_as( - type_=V1BinListResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/bin_lookup/raw_client.py b/skyflow/generated/rest/bin_lookup/raw_client.py new file mode 100644 index 00000000..c021d684 --- /dev/null +++ b/skyflow/generated/rest/bin_lookup/raw_client.py @@ -0,0 +1,177 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.not_found_error import NotFoundError +from ..types.v_1_bin_list_response import V1BinListResponse +from ..types.v_1_vault_schema_config import V1VaultSchemaConfig + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawBinLookupClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def bin_list_service_list_cards_of_bin( + self, + *, + fields: typing.Optional[typing.Sequence[str]] = OMIT, + bin: typing.Optional[str] = OMIT, + vault_schema_config: typing.Optional[V1VaultSchemaConfig] = OMIT, + skyflow_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1BinListResponse]: + """ + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + Parameters + ---------- + fields : typing.Optional[typing.Sequence[str]] + Fields to return. If not specified, all fields are returned. + + bin : typing.Optional[str] + BIN of the card. + + vault_schema_config : typing.Optional[V1VaultSchemaConfig] + + skyflow_id : typing.Optional[str] + skyflow_id of the record. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1BinListResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/card_lookup", + method="POST", + json={ + "fields": fields, + "BIN": bin, + "vault_schema_config": convert_and_respect_annotation_metadata( + object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" + ), + "skyflow_id": skyflow_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BinListResponse, + parse_obj_as( + type_=V1BinListResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawBinLookupClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def bin_list_service_list_cards_of_bin( + self, + *, + fields: typing.Optional[typing.Sequence[str]] = OMIT, + bin: typing.Optional[str] = OMIT, + vault_schema_config: typing.Optional[V1VaultSchemaConfig] = OMIT, + skyflow_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1BinListResponse]: + """ + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + Parameters + ---------- + fields : typing.Optional[typing.Sequence[str]] + Fields to return. If not specified, all fields are returned. + + bin : typing.Optional[str] + BIN of the card. + + vault_schema_config : typing.Optional[V1VaultSchemaConfig] + + skyflow_id : typing.Optional[str] + skyflow_id of the record. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1BinListResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/card_lookup", + method="POST", + json={ + "fields": fields, + "BIN": bin, + "vault_schema_config": convert_and_respect_annotation_metadata( + object_=vault_schema_config, annotation=V1VaultSchemaConfig, direction="write" + ), + "skyflow_id": skyflow_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BinListResponse, + parse_obj_as( + type_=V1BinListResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/client.py b/skyflow/generated/rest/client.py index 7064d444..3dab76fb 100644 --- a/skyflow/generated/rest/client.py +++ b/skyflow/generated/rest/client.py @@ -1,22 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from .environment import SkyflowEnvironment + import httpx -from .core.client_wrapper import SyncClientWrapper -from .audit.client import AuditClient -from .bin_lookup.client import BinLookupClient -from .records.client import RecordsClient -from .tokens.client import TokensClient -from .query.client import QueryClient -from .authentication.client import AuthenticationClient -from .core.client_wrapper import AsyncClientWrapper -from .audit.client import AsyncAuditClient -from .bin_lookup.client import AsyncBinLookupClient -from .records.client import AsyncRecordsClient -from .tokens.client import AsyncTokensClient -from .query.client import AsyncQueryClient -from .authentication.client import AsyncAuthenticationClient +from .audit.client import AsyncAuditClient, AuditClient +from .authentication.client import AsyncAuthenticationClient, AuthenticationClient +from .bin_lookup.client import AsyncBinLookupClient, BinLookupClient +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .environment import SkyflowEnvironment +from .query.client import AsyncQueryClient, QueryClient +from .records.client import AsyncRecordsClient, RecordsClient +from .tokens.client import AsyncTokensClient, TokensClient class Skyflow: @@ -31,8 +25,6 @@ class Skyflow: environment : SkyflowEnvironment The environment to use for requests from the client. 
from .environment import SkyflowEnvironment - - Defaults to SkyflowEnvironment.PRODUCTION @@ -50,10 +42,7 @@ class Skyflow: Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) + client = Skyflow(token="YOUR_TOKEN", ) """ def __init__( @@ -66,7 +55,9 @@ def __init__( follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None, ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) self._client_wrapper = SyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, @@ -97,8 +88,6 @@ class AsyncSkyflow: environment : SkyflowEnvironment The environment to use for requests from the client. from .environment import SkyflowEnvironment - - Defaults to SkyflowEnvironment.PRODUCTION @@ -116,10 +105,7 @@ class AsyncSkyflow: Examples -------- from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) + client = AsyncSkyflow(token="YOUR_TOKEN", ) """ def __init__( @@ -132,7 +118,9 @@ def __init__( follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None, ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) self._client_wrapper = AsyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, diff --git a/skyflow/generated/rest/core/__init__.py b/skyflow/generated/rest/core/__init__.py index f03aecbf..31bbb818 100644 --- a/skyflow/generated/rest/core/__init__.py +++ b/skyflow/generated/rest/core/__init__.py @@ -1,10 +1,13 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime from .file import File, convert_file_dict_to_httpx_tuples, with_content_type from .http_client import AsyncHttpClient, HttpClient +from .http_response import AsyncHttpResponse, HttpResponse from .jsonable_encoder import jsonable_encoder from .pydantic_utilities import ( IS_PYDANTIC_V2, @@ -24,10 +27,12 @@ "ApiError", "AsyncClientWrapper", "AsyncHttpClient", + "AsyncHttpResponse", "BaseClientWrapper", "FieldMetadata", "File", "HttpClient", + "HttpResponse", "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", diff --git a/skyflow/generated/rest/core/api_error.py b/skyflow/generated/rest/core/api_error.py index 2e9fc543..6f850a60 100644 --- a/skyflow/generated/rest/core/api_error.py +++ b/skyflow/generated/rest/core/api_error.py @@ -1,15 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing +from typing import Any, Dict, Optional class ApiError(Exception): - status_code: typing.Optional[int] - body: typing.Any + headers: Optional[Dict[str, str]] + status_code: Optional[int] + body: Any - def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): + def __init__( + self, + *, + headers: Optional[Dict[str, str]] = None, + status_code: Optional[int] = None, + body: Any = None, + ) -> None: + self.headers = headers self.status_code = status_code self.body = body def __str__(self) -> str: - return f"status_code: {self.status_code}, body: {self.body}" + return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index 7177cf7c..2c55b8d9 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. import typing + import httpx -from .http_client import HttpClient -from .http_client import AsyncHttpClient +from .http_client import AsyncHttpClient, HttpClient class BaseClientWrapper: @@ -21,8 +21,8 @@ def __init__( def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", - "X-Fern-SDK-Name": "skyflow", - "X-Fern-SDK-Version": "1.15.2", + "X-Fern-SDK-Name": "skyflow.generated.rest", + "X-Fern-SDK-Version": "0.0.163", } headers["Authorization"] = f"Bearer {self._get_token()}" return headers diff --git a/skyflow/generated/rest/core/http_client.py b/skyflow/generated/rest/core/http_client.py index 275a54cc..e7bd4f79 100644 --- a/skyflow/generated/rest/core/http_client.py +++ b/skyflow/generated/rest/core/http_client.py @@ -2,7 +2,6 @@ import asyncio import email.utils -import json import re import time import typing @@ -11,7 +10,6 @@ from random import random import httpx - from .file import File, convert_file_dict_to_httpx_tuples from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query diff --git a/skyflow/generated/rest/core/http_response.py b/skyflow/generated/rest/core/http_response.py new file mode 100644 index 00000000..48a1798a --- /dev/null +++ b/skyflow/generated/rest/core/http_response.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import Dict, Generic, TypeVar + +import httpx + +T = TypeVar("T") +"""Generic to represent the underlying type of the data wrapped by the HTTP response.""" + + +class BaseHttpResponse: + """Minimalist HTTP response wrapper that exposes response headers.""" + + _response: httpx.Response + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def headers(self) -> Dict[str, str]: + return dict(self._response.headers) + + +class HttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + def close(self) -> None: + self._response.close() + + +class AsyncHttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + async def close(self) -> None: + await self._response.aclose() diff --git a/skyflow/generated/rest/core/jsonable_encoder.py b/skyflow/generated/rest/core/jsonable_encoder.py index 1b631e90..afee3662 100644 --- a/skyflow/generated/rest/core/jsonable_encoder.py +++ b/skyflow/generated/rest/core/jsonable_encoder.py @@ -17,7 +17,6 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union import pydantic - from .datetime_utils import serialize_datetime from .pydantic_utilities import ( IS_PYDANTIC_V2, diff --git a/skyflow/generated/rest/core/pydantic_utilities.py b/skyflow/generated/rest/core/pydantic_utilities.py index ca1f4792..60a2c713 100644 --- a/skyflow/generated/rest/core/pydantic_utilities.py +++ b/skyflow/generated/rest/core/pydantic_utilities.py @@ -2,89 +2,65 @@ # nopycln: file import datetime as dt -import typing from collections import defaultdict - -import typing_extensions +from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast import pydantic -from .datetime_utils import serialize_datetime -from .serialization import convert_and_respect_annotation_metadata - IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - # isort will try to reformat the comments on these imports, which breaks mypy - # isort: off - from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_date as parse_date, - ) - from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_datetime as parse_datetime, - ) - from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - ENCODERS_BY_TYPE as encoders_by_type, - ) - from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - get_args as get_args, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - get_origin as get_origin, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_literal_type as is_literal_type, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_union as is_union, - ) - from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + from pydantic.v1.datetime_parse 
import parse_date as parse_date + from pydantic.v1.datetime_parse import parse_datetime as parse_datetime + from pydantic.v1.fields import ModelField as ModelField + from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] + from pydantic.v1.typing import get_args as get_args + from pydantic.v1.typing import get_origin as get_origin + from pydantic.v1.typing import is_literal_type as is_literal_type + from pydantic.v1.typing import is_union as is_union else: - from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 - from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 - from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 - from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 - from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 - from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 - from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 - from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 - - # isort: on + from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] + from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] + from pydantic.typing import get_args as get_args # type: ignore[no-redef] + from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] + from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] + from pydantic.typing import is_union as is_union # type: ignore[no-redef] +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata +from typing_extensions import TypeAlias -T = typing.TypeVar("T") -Model = typing.TypeVar("Model", bound=pydantic.BaseModel) +T = TypeVar("T") +Model = TypeVar("Model", bound=pydantic.BaseModel) -def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: +def parse_obj_as(type_: Type[T], object_: Any) -> T: dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") if IS_PYDANTIC_V2: - adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] return adapter.validate_python(dealiased_object) - else: - return pydantic.parse_obj_as(type_, dealiased_object) + return pydantic.parse_obj_as(type_, dealiased_object) -def to_jsonable_with_fallback( - obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] -) -> typing.Any: +def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any: if IS_PYDANTIC_V2: from pydantic_core import to_jsonable_python return to_jsonable_python(obj, fallback=fallback_serializer) - else: - return fallback_serializer(obj) + return fallback_serializer(obj) class UniversalBaseModel(pydantic.BaseModel): if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] # Allow fields beginning with `model_` to be used in the model protected_namespaces=(), - ) # 
type: ignore # Pydantic v2 + ) - @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2 - def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2 + @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore[attr-defined] + def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> Any: # type: ignore[name-defined] serialized = handler(self) data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()} return data @@ -96,34 +72,28 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} @classmethod - def model_construct( - cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any - ) -> "Model": + def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") return cls.construct(_fields_set, **dealiased_object) @classmethod - def construct( - cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any - ) -> "Model": + def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") if IS_PYDANTIC_V2: - return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2 - else: - return super().construct(_fields_set, **dealiased_object) + return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] + return super().construct(_fields_set, **dealiased_object) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { + def json(self, **kwargs: Any) -> str: + kwargs_with_defaults = { "by_alias": True, "exclude_unset": True, **kwargs, } if IS_PYDANTIC_V2: - return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 - else: - return super().json(**kwargs_with_defaults) + return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] + return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + def dict(self, **kwargs: Any) -> Dict[str, Any]: """ Override the default dict method to `exclude_unset` by default. This function patches `exclude_unset` to work include fields within non-None default values. @@ -134,21 +104,21 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models # that we have less control over, and this is less intrusive than custom serializers for now. 
if IS_PYDANTIC_V2: - kwargs_with_defaults_exclude_unset: typing.Any = { + kwargs_with_defaults_exclude_unset = { **kwargs, "by_alias": True, "exclude_unset": True, "exclude_none": False, } - kwargs_with_defaults_exclude_none: typing.Any = { + kwargs_with_defaults_exclude_none = { **kwargs, "by_alias": True, "exclude_none": True, "exclude_unset": False, } dict_dump = deep_union_pydantic_dicts( - super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 - super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] ) else: @@ -168,7 +138,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: if default is not None: self.__fields_set__.add(name) - kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { + kwargs_with_defaults_exclude_unset_include_fields = { "by_alias": True, "exclude_unset": True, "include": _fields_set, @@ -180,12 +150,10 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write") -def _union_list_of_pydantic_dicts( - source: typing.List[typing.Any], destination: typing.List[typing.Any] -) -> typing.List[typing.Any]: - converted_list: typing.List[typing.Any] = [] +def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]: + converted_list: List[Any] = [] for i, item in enumerate(source): - destination_value = destination[i] # type: ignore + destination_value = destination[i] if isinstance(item, dict): converted_list.append(deep_union_pydantic_dicts(item, destination_value)) elif isinstance(item, list): @@ -195,9 +163,7 @@ def _union_list_of_pydantic_dicts( return converted_list -def deep_union_pydantic_dicts( - source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] -) -> typing.Dict[str, typing.Any]: +def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]: for key, value in source.items(): node = destination.setdefault(key, {}) if isinstance(value, dict): @@ -215,18 +181,16 @@ def deep_union_pydantic_dicts( if IS_PYDANTIC_V2: - class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[name-defined, type-arg] pass - UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore + UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] else: - UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] -def encode_by_type(o: typing.Any) -> typing.Any: - encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( - defaultdict(tuple) - ) +def encode_by_type(o: Any) -> Any: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) for type_, encoder in encoders_by_type.items(): encoders_by_class_tuples[encoder] += (type_,) @@ -237,54 +201,49 @@ def encode_by_type(o: typing.Any) -> typing.Any: return encoder(o) -def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: +def update_forward_refs(model: Type["Model"], **localns: Any) -> None: if IS_PYDANTIC_V2: - 
model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + model.model_rebuild(raise_errors=False) # type: ignore[attr-defined] else: model.update_forward_refs(**localns) # Mirrors Pydantic's internal typing -AnyCallable = typing.Callable[..., typing.Any] +AnyCallable = Callable[..., Any] def universal_root_validator( pre: bool = False, -) -> typing.Callable[[AnyCallable], AnyCallable]: +) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] return decorator -def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: +def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) return decorator -PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] +PydanticField = Union[ModelField, pydantic.fields.FieldInfo] -def _get_model_fields( - model: typing.Type["Model"], -) -> typing.Mapping[str, PydanticField]: +def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: if IS_PYDANTIC_V2: - return model.model_fields # type: ignore # Pydantic v2 - else: - return model.__fields__ # type: ignore # Pydantic v1 + return cast(Mapping[str, PydanticField], model.model_fields) # type: ignore[attr-defined] + return cast(Mapping[str, PydanticField], model.__fields__) -def _get_field_default(field: PydanticField) -> typing.Any: +def _get_field_default(field: PydanticField) -> Any: try: - value = field.get_default() # type: ignore # Pydantic < v1.10.15 + value = field.get_default() # type: ignore[union-attr] except: value = field.default if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/core/serialization.py b/skyflow/generated/rest/core/serialization.py index cb5dcbf9..c36e865c 100644 --- a/skyflow/generated/rest/core/serialization.py +++ b/skyflow/generated/rest/core/serialization.py @@ -4,9 +4,8 @@ import inspect import typing -import typing_extensions - import pydantic +import typing_extensions class FieldMetadata: @@ -161,7 +160,12 @@ def _convert_mapping( direction: typing.Literal["read", "write"], ) -> typing.Mapping[str, object]: converted_object: typing.Dict[str, object] = {} - annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + try: + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + except NameError: + # The TypedDict contains a circular reference, so + # we use the __annotations__ attribute directly. 
+ annotations = getattr(expected_type, "__annotations__", {}) aliases_to_field_names = _get_alias_to_field_name(annotations) for key, value in object_.items(): if direction == "read" and key in aliases_to_field_names: diff --git a/skyflow/generated/rest/errors/__init__.py b/skyflow/generated/rest/errors/__init__.py index 64f898f5..fdf6196c 100644 --- a/skyflow/generated/rest/errors/__init__.py +++ b/skyflow/generated/rest/errors/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .bad_request_error import BadRequestError from .not_found_error import NotFoundError from .unauthorized_error import UnauthorizedError diff --git a/skyflow/generated/rest/errors/bad_request_error.py b/skyflow/generated/rest/errors/bad_request_error.py index 2f3dba61..5f24fa6d 100644 --- a/skyflow/generated/rest/errors/bad_request_error.py +++ b/skyflow/generated/rest/errors/bad_request_error.py @@ -1,8 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class BadRequestError(ApiError): def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): diff --git a/skyflow/generated/rest/errors/not_found_error.py b/skyflow/generated/rest/errors/not_found_error.py index b557be0a..68977121 100644 --- a/skyflow/generated/rest/errors/not_found_error.py +++ b/skyflow/generated/rest/errors/not_found_error.py @@ -1,8 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class NotFoundError(ApiError): def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): diff --git a/skyflow/generated/rest/errors/unauthorized_error.py b/skyflow/generated/rest/errors/unauthorized_error.py index 6d01cc9f..cd97f14d 100644 --- a/skyflow/generated/rest/errors/unauthorized_error.py +++ b/skyflow/generated/rest/errors/unauthorized_error.py @@ -1,8 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class UnauthorizedError(ApiError): def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): diff --git a/skyflow/generated/rest/query/__init__.py b/skyflow/generated/rest/query/__init__.py index f3ea2659..5cde0202 100644 --- a/skyflow/generated/rest/query/__init__.py +++ b/skyflow/generated/rest/query/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/skyflow/generated/rest/query/client.py b/skyflow/generated/rest/query/client.py index cf3ca319..1f5edd75 100644 --- a/skyflow/generated/rest/query/client.py +++ b/skyflow/generated/rest/query/client.py @@ -1,15 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.v_1_get_query_response import V1GetQueryResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawQueryClient, RawQueryClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -17,7 +13,18 @@ class QueryClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawQueryClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawQueryClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawQueryClient + """ + return self._raw_client def query_service_execute_query( self, @@ -48,55 +55,27 @@ def query_service_execute_query( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.query.query_service_execute_query( - vault_id="vaultID", - query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.query.query_service_execute_query(vault_id='vaultID', query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/query", - method="POST", - json={ - "query": query, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetQueryResponse, - parse_obj_as( - type_=V1GetQueryResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.query_service_execute_query(vault_id, query=query, request_options=request_options) + return _response.data class AsyncQueryClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawQueryClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawQueryClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawQueryClient + """ + return self._raw_client async def query_service_execute_query( self, @@ -126,56 +105,14 @@ async def query_service_execute_query( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.query.query_service_execute_query( - vault_id="vaultID", - query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', - ) - - + await client.query.query_service_execute_query(vault_id='vaultID', query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/query", - method="POST", - json={ - "query": query, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.query_service_execute_query( + vault_id, query=query, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetQueryResponse, - parse_obj_as( - type_=V1GetQueryResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/query/raw_client.py b/skyflow/generated/rest/query/raw_client.py new file mode 100644 index 00000000..b6a201a1 --- /dev/null +++ b/skyflow/generated/rest/query/raw_client.py @@ -0,0 +1,152 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.not_found_error import NotFoundError +from ..types.v_1_get_query_response import V1GetQueryResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawQueryClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def query_service_execute_query( + self, + vault_id: str, + *, + query: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1GetQueryResponse]: + """ + Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
    •
+
+        Parameters
+        ----------
+        vault_id : str
+            ID of the vault.
+
+        query : typing.Optional[str]
+            The SQL query to execute.

      Supported commands:
      • SELECT
      Supported operators:
      • >
      • <
      • =
      • AND
      • OR
      • NOT
      • LIKE
      • ILIKE
      • NULL
      • NOT NULL
      Supported keywords:
      • FROM
      • JOIN
      • INNER JOIN
      • LEFT OUTER JOIN
      • LEFT JOIN
      • RIGHT OUTER JOIN
      • RIGHT JOIN
      • FULL OUTER JOIN
      • FULL JOIN
      • OFFSET
      • LIMIT
      • WHERE
      Supported functions:
      • AVG()
      • SUM()
      • COUNT()
      • MIN()
      • MAX()
      • REDACTION()
      + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1GetQueryResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/query", + method="POST", + json={ + "query": query, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetQueryResponse, + parse_obj_as( + type_=V1GetQueryResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawQueryClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def query_service_execute_query( + self, + vault_id: str, + *, + query: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1GetQueryResponse]: + """ + Returns records for a valid SQL query. This endpoint
      • Can return redacted record values.
      • Supports only the SELECT command.
      • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
      • Can't modify the vault or perform transactions.
      • Can't return tokens.
      • Can't return file download or render URLs.
      • Doesn't support the WHERE keyword with columns using transient tokenization.
      • Doesn't support `?` conditional for columns with column-level encryption disabled.
        •
+
+        Parameters
+        ----------
+        vault_id : str
+            ID of the vault.
+
+        query : typing.Optional[str]
+            The SQL query to execute.

          Supported commands:
          • SELECT
          Supported operators:
          • >
          • <
          • =
          • AND
          • OR
          • NOT
          • LIKE
          • ILIKE
          • NULL
          • NOT NULL
          Supported keywords:
          • FROM
          • JOIN
          • INNER JOIN
          • LEFT OUTER JOIN
          • LEFT JOIN
          • RIGHT OUTER JOIN
          • RIGHT JOIN
          • FULL OUTER JOIN
          • FULL JOIN
          • OFFSET
          • LIMIT
          • WHERE
          Supported functions:
          • AVG()
          • SUM()
          • COUNT()
          • MIN()
          • MAX()
          • REDACTION()
          + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1GetQueryResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/query", + method="POST", + json={ + "query": query, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetQueryResponse, + parse_obj_as( + type_=V1GetQueryResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/records/__init__.py b/skyflow/generated/rest/records/__init__.py index b144d479..19c344fa 100644 --- a/skyflow/generated/rest/records/__init__.py +++ b/skyflow/generated/rest/records/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( RecordServiceBulkGetRecordRequestOrderBy, RecordServiceBulkGetRecordRequestRedaction, diff --git a/skyflow/generated/rest/records/client.py b/skyflow/generated/rest/records/client.py index d73e0da0..643e2826 100644 --- a/skyflow/generated/rest/records/client.py +++ b/skyflow/generated/rest/records/client.py @@ -1,30 +1,25 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from ..types.v_1_batch_record import V1BatchRecord -from ..types.v_1_byot import V1Byot + +from .. 
import core +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.v_1_batch_operation_response import V1BatchOperationResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.pydantic_utilities import parse_obj_as -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction -from .types.record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy +from ..types.v_1_batch_record import V1BatchRecord +from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse from ..types.v_1_bulk_get_record_response import V1BulkGetRecordResponse +from ..types.v_1_byot import V1Byot +from ..types.v_1_delete_file_response import V1DeleteFileResponse +from ..types.v_1_delete_record_response import V1DeleteRecordResponse from ..types.v_1_field_records import V1FieldRecords +from ..types.v_1_get_file_scan_status_response import V1GetFileScanStatusResponse from ..types.v_1_insert_record_response import V1InsertRecordResponse -from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse -from .types.record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction from ..types.v_1_update_record_response import V1UpdateRecordResponse -from ..types.v_1_delete_record_response import V1DeleteRecordResponse -from .. import core -from ..types.v_1_delete_file_response import V1DeleteFileResponse -from ..types.v_1_get_file_scan_status_response import V1GetFileScanStatusResponse -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawRecordsClient, RawRecordsClient +from .types.record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy +from .types.record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction +from .types.record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -32,7 +27,18 @@ class RecordsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRecordsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRecordsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
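# --- Illustrative usage sketch (editorial note, not part of the generated diff above) ---
# Shows one way the new `with_raw_response` accessor could pair with the HttpResponse
# wrapper introduced in core/http_response.py. The vault ID, table name, and record ID
# below are placeholder values, and the exact raw-client signature is assumed to mirror
# the delegation calls shown in records/client.py.
from skyflow import Skyflow

client = Skyflow(token="YOUR_TOKEN")

# The raw client returns an HttpResponse wrapper exposing both the response headers
# and the parsed payload, instead of only the parsed model.
raw = client.records.with_raw_response.record_service_get_record(
    "vaultID", "persons", "recordID"
)
print(raw.headers)  # headers from the underlying httpx.Response
print(raw.data)     # parsed V1FieldRecords payload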
+ + Returns + ------- + RawRecordsClient + """ + return self._raw_client def record_service_batch_operation( self, @@ -69,80 +75,19 @@ def record_service_batch_operation( Examples -------- - from skyflow import Skyflow, V1BatchRecord - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_batch_operation( - vault_id="vaultID", - records=[ - V1BatchRecord( - fields={ - "drivers_license_number": "89867453", - "name": "Connor", - "phone_number": "8794523160", - "ssn": "143-89-2306", - }, - table_name="persons", - method="POST", - batch_id="persons-12345", - redaction="PLAIN_TEXT", - tokenization=False, - download_url=False, - upsert="drivers_license_number", - ), - V1BatchRecord( - table_name="persons", - method="GET", - batch_id="persons-12345", - redaction="PLAIN_TEXT", - tokenization=False, - id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", - download_url=True, - ), - ], - ) + from skyflow import Skyflow + from skyflow import V1BatchRecord + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_batch_operation(vault_id='vaultID', records=[V1BatchRecord(fields={'drivers_license_number': '89867453' + , 'name': 'Connor' + , 'phone_number': '8794523160' + , 'ssn': '143-89-2306' + }, table_name='persons', method="POST", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, download_url=False, upsert='drivers_license_number', ), V1BatchRecord(table_name='persons', method="GET", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, id='f1dbc55c-7c9b-495d-9a36-72bb2b619202', download_url=True, )], ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}", - method="POST", - json={ - "records": convert_and_respect_annotation_metadata( - object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" - ), - "continueOnError": continue_on_error, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.record_service_batch_operation( + vault_id, records=records, continue_on_error=continue_on_error, byot=byot, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BatchOperationResponse, - parse_obj_as( - type_=V1BatchOperationResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_bulk_get_record( self, @@ -213,55 +158,25 @@ def record_service_bulk_get_record( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_bulk_get_record( - vault_id="vaultID", - object_name="objectName", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_bulk_get_record(vault_id='vaultID', object_name='objectName', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="GET", - params={ - "skyflow_ids": skyflow_ids, - "redaction": redaction, - "tokenization": 
tokenization, - "fields": fields, - "offset": offset, - "limit": limit, - "downloadURL": download_url, - "column_name": column_name, - "column_values": column_values, - "order_by": order_by, - }, + _response = self._raw_client.record_service_bulk_get_record( + vault_id, + object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BulkGetRecordResponse, - parse_obj_as( - type_=V1BulkGetRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_insert_record( self, @@ -310,78 +225,30 @@ def record_service_insert_record( Examples -------- - from skyflow import Skyflow, V1FieldRecords - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_insert_record( - vault_id="vaultID", - object_name="objectName", - records=[ - V1FieldRecords( - fields={ - "drivers_license_number": "13456789", - "name": "John", - "phone_number": "1236784563", - "ssn": "123-45-6789", - }, - ), - V1FieldRecords( - fields={ - "drivers_license_number": "98765432", - "name": "James", - "phone_number": "9876543215", - "ssn": "345-45-9876", - }, - ), - ], - tokenization=True, - upsert="drivers_license_number", - homogeneous=False, - ) + from skyflow import Skyflow + from skyflow import V1FieldRecords + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_insert_record(vault_id='vaultID', object_name='objectName', records=[V1FieldRecords(fields={'drivers_license_number': '13456789' + , 'name': 'John' + , 'phone_number': '1236784563' + , 'ssn': '123-45-6789' + }, ), V1FieldRecords(fields={'drivers_license_number': '98765432' + , 'name': 'James' + , 'phone_number': '9876543215' + , 'ssn': '345-45-9876' + }, )], tokenization=True, upsert='drivers_license_number', homogeneous=False, ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="POST", - json={ - "records": convert_and_respect_annotation_metadata( - object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" - ), - "tokenization": tokenization, - "upsert": upsert, - "homogeneous": homogeneous, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.record_service_insert_record( + vault_id, + object_name, + records=records, + tokenization=tokenization, + upsert=upsert, + homogeneous=homogeneous, + byot=byot, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1InsertRecordResponse, - parse_obj_as( - type_=V1InsertRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - 
parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_bulk_delete_record( self, @@ -416,54 +283,13 @@ def record_service_bulk_delete_record( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_bulk_delete_record( - vault_id="vaultID", - object_name="objectName", - skyflow_ids=[ - "51782ea4-91a5-4430-a06d-f4b76efd3d2f", - "110ce08f-6059-4874-b1ae-7c6651d286ff", - ], - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_bulk_delete_record(vault_id='vaultID', object_name='objectName', skyflow_ids=['51782ea4-91a5-4430-a06d-f4b76efd3d2f', '110ce08f-6059-4874-b1ae-7c6651d286ff'], ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="DELETE", - json={ - "skyflow_ids": skyflow_ids, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.record_service_bulk_delete_record( + vault_id, object_name, skyflow_ids=skyflow_ids, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BulkDeleteRecordResponse, - parse_obj_as( - type_=V1BulkDeleteRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_get_record( self, @@ -514,50 +340,20 @@ def record_service_get_record( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_get_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_get_record(vault_id='vaultID', object_name='objectName', id='ID', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="GET", - params={ - "redaction": redaction, - "tokenization": tokenization, - "fields": fields, - "downloadURL": download_url, - }, + _response = self._raw_client.record_service_get_record( + vault_id, + object_name, + id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1FieldRecords, - parse_obj_as( - type_=V1FieldRecords, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_update_record( self, @@ -601,65 +397,25 @@ def record_service_update_record( Examples -------- - from skyflow import Skyflow, V1FieldRecords - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_update_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - record=V1FieldRecords( - fields={ - "drivers_license_number": "89867453", - "name": "Steve Smith", - "phone_number": "8794523160", - "ssn": "143-89-2306", - }, - ), - tokenization=True, - ) + from skyflow import Skyflow + from skyflow import V1FieldRecords + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_update_record(vault_id='vaultID', object_name='objectName', id='ID', record=V1FieldRecords(fields={'drivers_license_number': '89867453' + , 'name': 'Steve Smith' + , 'phone_number': '8794523160' + , 'ssn': '143-89-2306' + }, ), tokenization=True, ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="PUT", - json={ - "record": convert_and_respect_annotation_metadata( - object_=record, annotation=V1FieldRecords, direction="write" - ), - "tokenization": tokenization, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.record_service_update_record( + vault_id, + object_name, + id, + record=record, + tokenization=tokenization, + byot=byot, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1UpdateRecordResponse, - parse_obj_as( - type_=V1UpdateRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_delete_record( self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -689,44 +445,13 @@ def record_service_delete_record( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.record_service_delete_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.record_service_delete_record(vault_id='vaultID', object_name='objectName', id='ID', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.record_service_delete_record( + vault_id, object_name, id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DeleteRecordResponse, - parse_obj_as( - type_=V1DeleteRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - 
typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def file_service_upload_file( self, @@ -765,49 +490,13 @@ def file_service_upload_file( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.file_service_upload_file( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.file_service_upload_file(vault_id='vaultID', object_name='objectName', id='ID', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", - method="POST", - data={}, - files={ - "fileColumnName": file_column_name, - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.file_service_upload_file( + vault_id, object_name, id, file_column_name=file_column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1UpdateRecordResponse, - parse_obj_as( - type_=V1UpdateRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def file_service_delete_file( self, @@ -846,45 +535,13 @@ def file_service_delete_file( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.file_service_delete_file( - vault_id="vaultID", - table_name="tableName", - id="ID", - column_name="columnName", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.file_service_delete_file(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", - method="DELETE", - request_options=request_options, + _response = self._raw_client.file_service_delete_file( + vault_id, table_name, id, column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DeleteFileResponse, - parse_obj_as( - type_=V1DeleteFileResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def file_service_get_file_scan_status( 
self, @@ -923,50 +580,29 @@ def file_service_get_file_scan_status( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.records.file_service_get_file_scan_status( - vault_id="vaultID", - table_name="tableName", - id="ID", - column_name="columnName", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.records.file_service_get_file_scan_status(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", - method="GET", - request_options=request_options, + _response = self._raw_client.file_service_get_file_scan_status( + vault_id, table_name, id, column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetFileScanStatusResponse, - parse_obj_as( - type_=V1GetFileScanStatusResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncRecordsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRecordsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRecordsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRecordsClient + """ + return self._raw_client async def record_service_batch_operation( self, @@ -1003,88 +639,22 @@ async def record_service_batch_operation( Examples -------- + from skyflow import AsyncSkyflow + from skyflow import V1BatchRecord import asyncio - - from skyflow import AsyncSkyflow, V1BatchRecord - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_batch_operation( - vault_id="vaultID", - records=[ - V1BatchRecord( - fields={ - "drivers_license_number": "89867453", - "name": "Connor", - "phone_number": "8794523160", - "ssn": "143-89-2306", - }, - table_name="persons", - method="POST", - batch_id="persons-12345", - redaction="PLAIN_TEXT", - tokenization=False, - download_url=False, - upsert="drivers_license_number", - ), - V1BatchRecord( - table_name="persons", - method="GET", - batch_id="persons-12345", - redaction="PLAIN_TEXT", - tokenization=False, - id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", - download_url=True, - ), - ], - ) - - + await client.records.record_service_batch_operation(vault_id='vaultID', records=[V1BatchRecord(fields={'drivers_license_number': '89867453' + , 'name': 'Connor' + , 'phone_number': '8794523160' + , 'ssn': '143-89-2306' + }, table_name='persons', method="POST", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, download_url=False, upsert='drivers_license_number', ), V1BatchRecord(table_name='persons', method="GET", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, id='f1dbc55c-7c9b-495d-9a36-72bb2b619202', download_url=True, )], ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}", - method="POST", - json={ - "records": convert_and_respect_annotation_metadata( - object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" - ), - "continueOnError": continue_on_error, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.record_service_batch_operation( + vault_id, records=records, continue_on_error=continue_on_error, byot=byot, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BatchOperationResponse, - parse_obj_as( - type_=V1BatchOperationResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_bulk_get_record( self, @@ -1154,64 +724,29 @@ async def record_service_bulk_get_record( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_bulk_get_record( - vault_id="vaultID", - object_name="objectName", - ) - - + await client.records.record_service_bulk_get_record(vault_id='vaultID', object_name='objectName', ) 
asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="GET", - params={ - "skyflow_ids": skyflow_ids, - "redaction": redaction, - "tokenization": tokenization, - "fields": fields, - "offset": offset, - "limit": limit, - "downloadURL": download_url, - "column_name": column_name, - "column_values": column_values, - "order_by": order_by, - }, + _response = await self._raw_client.record_service_bulk_get_record( + vault_id, + object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BulkGetRecordResponse, - parse_obj_as( - type_=V1BulkGetRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_insert_record( self, @@ -1260,86 +795,33 @@ async def record_service_insert_record( Examples -------- + from skyflow import AsyncSkyflow + from skyflow import V1FieldRecords import asyncio - - from skyflow import AsyncSkyflow, V1FieldRecords - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_insert_record( - vault_id="vaultID", - object_name="objectName", - records=[ - V1FieldRecords( - fields={ - "drivers_license_number": "13456789", - "name": "John", - "phone_number": "1236784563", - "ssn": "123-45-6789", - }, - ), - V1FieldRecords( - fields={ - "drivers_license_number": "98765432", - "name": "James", - "phone_number": "9876543215", - "ssn": "345-45-9876", - }, - ), - ], - tokenization=True, - upsert="drivers_license_number", - homogeneous=False, - ) - - + await client.records.record_service_insert_record(vault_id='vaultID', object_name='objectName', records=[V1FieldRecords(fields={'drivers_license_number': '13456789' + , 'name': 'John' + , 'phone_number': '1236784563' + , 'ssn': '123-45-6789' + }, ), V1FieldRecords(fields={'drivers_license_number': '98765432' + , 'name': 'James' + , 'phone_number': '9876543215' + , 'ssn': '345-45-9876' + }, )], tokenization=True, upsert='drivers_license_number', homogeneous=False, ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="POST", - json={ - "records": convert_and_respect_annotation_metadata( - object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" - ), - "tokenization": tokenization, - "upsert": upsert, - "homogeneous": homogeneous, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.record_service_insert_record( + vault_id, + object_name, + records=records, + tokenization=tokenization, + upsert=upsert, + 
homogeneous=homogeneous, + byot=byot, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1InsertRecordResponse, - parse_obj_as( - type_=V1InsertRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_bulk_delete_record( self, @@ -1373,63 +855,17 @@ async def record_service_bulk_delete_record( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_bulk_delete_record( - vault_id="vaultID", - object_name="objectName", - skyflow_ids=[ - "51782ea4-91a5-4430-a06d-f4b76efd3d2f", - "110ce08f-6059-4874-b1ae-7c6651d286ff", - ], - ) - - + await client.records.record_service_bulk_delete_record(vault_id='vaultID', object_name='objectName', skyflow_ids=['51782ea4-91a5-4430-a06d-f4b76efd3d2f', '110ce08f-6059-4874-b1ae-7c6651d286ff'], ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", - method="DELETE", - json={ - "skyflow_ids": skyflow_ids, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.record_service_bulk_delete_record( + vault_id, object_name, skyflow_ids=skyflow_ids, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1BulkDeleteRecordResponse, - parse_obj_as( - type_=V1BulkDeleteRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_get_record( self, @@ -1479,59 +915,24 @@ async def record_service_get_record( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_get_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) - - + await client.records.record_service_get_record(vault_id='vaultID', object_name='objectName', id='ID', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="GET", - params={ - "redaction": redaction, - "tokenization": tokenization, - "fields": fields, - "downloadURL": 
download_url, - }, + _response = await self._raw_client.record_service_get_record( + vault_id, + object_name, + id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1FieldRecords, - parse_obj_as( - type_=V1FieldRecords, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_update_record( self, @@ -1575,73 +976,28 @@ async def record_service_update_record( Examples -------- + from skyflow import AsyncSkyflow + from skyflow import V1FieldRecords import asyncio - - from skyflow import AsyncSkyflow, V1FieldRecords - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_update_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - record=V1FieldRecords( - fields={ - "drivers_license_number": "89867453", - "name": "Steve Smith", - "phone_number": "8794523160", - "ssn": "143-89-2306", - }, - ), - tokenization=True, - ) - - + await client.records.record_service_update_record(vault_id='vaultID', object_name='objectName', id='ID', record=V1FieldRecords(fields={'drivers_license_number': '89867453' + , 'name': 'Steve Smith' + , 'phone_number': '8794523160' + , 'ssn': '143-89-2306' + }, ), tokenization=True, ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="PUT", - json={ - "record": convert_and_respect_annotation_metadata( - object_=record, annotation=V1FieldRecords, direction="write" - ), - "tokenization": tokenization, - "byot": byot, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.record_service_update_record( + vault_id, + object_name, + id, + record=record, + tokenization=tokenization, + byot=byot, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1UpdateRecordResponse, - parse_obj_as( - type_=V1UpdateRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_delete_record( self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -1670,53 +1026,17 @@ async def record_service_delete_record( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - 
token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.record_service_delete_record( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) - - + await client.records.record_service_delete_record(vault_id='vaultID', object_name='objectName', id='ID', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.record_service_delete_record( + vault_id, object_name, id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DeleteRecordResponse, - parse_obj_as( - type_=V1DeleteRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def file_service_upload_file( self, @@ -1754,58 +1074,17 @@ async def file_service_upload_file( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.file_service_upload_file( - vault_id="vaultID", - object_name="objectName", - id="ID", - ) - - + await client.records.file_service_upload_file(vault_id='vaultID', object_name='objectName', id='ID', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", - method="POST", - data={}, - files={ - "fileColumnName": file_column_name, - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.file_service_upload_file( + vault_id, object_name, id, file_column_name=file_column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1UpdateRecordResponse, - parse_obj_as( - type_=V1UpdateRecordResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def file_service_delete_file( self, @@ -1843,54 +1122,17 @@ async def file_service_delete_file( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.file_service_delete_file( - vault_id="vaultID", - table_name="tableName", - id="ID", - column_name="columnName", - ) 
- - + await client.records.file_service_delete_file(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", - method="DELETE", - request_options=request_options, + _response = await self._raw_client.file_service_delete_file( + vault_id, table_name, id, column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DeleteFileResponse, - parse_obj_as( - type_=V1DeleteFileResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def file_service_get_file_scan_status( self, @@ -1928,51 +1170,14 @@ async def file_service_get_file_scan_status( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.records.file_service_get_file_scan_status( - vault_id="vaultID", - table_name="tableName", - id="ID", - column_name="columnName", - ) - - + await client.records.file_service_get_file_scan_status(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", - method="GET", - request_options=request_options, + _response = await self._raw_client.file_service_get_file_scan_status( + vault_id, table_name, id, column_name, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1GetFileScanStatusResponse, - parse_obj_as( - type_=V1GetFileScanStatusResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/records/raw_client.py b/skyflow/generated/rest/records/raw_client.py new file mode 100644 index 00000000..3bbed594 --- /dev/null +++ b/skyflow/generated/rest/records/raw_client.py @@ -0,0 +1,1545 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from .. 
import core +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.not_found_error import NotFoundError +from ..types.v_1_batch_operation_response import V1BatchOperationResponse +from ..types.v_1_batch_record import V1BatchRecord +from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from ..types.v_1_bulk_get_record_response import V1BulkGetRecordResponse +from ..types.v_1_byot import V1Byot +from ..types.v_1_delete_file_response import V1DeleteFileResponse +from ..types.v_1_delete_record_response import V1DeleteRecordResponse +from ..types.v_1_field_records import V1FieldRecords +from ..types.v_1_get_file_scan_status_response import V1GetFileScanStatusResponse +from ..types.v_1_insert_record_response import V1InsertRecordResponse +from ..types.v_1_update_record_response import V1UpdateRecordResponse +from .types.record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy +from .types.record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction +from .types.record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawRecordsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def record_service_batch_operation( + self, + vault_id: str, + *, + records: typing.Optional[typing.Sequence[V1BatchRecord]] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1BatchOperationResponse]: + """ + Performs multiple record operations in a single transaction. + + Parameters + ---------- + vault_id : str + ID of the vault. + + records : typing.Optional[typing.Sequence[V1BatchRecord]] + Record operations to perform. + + continue_on_error : typing.Optional[bool] + Continue performing operations on partial errors. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1BatchOperationResponse] + A successful response. 
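The wrapper methods above now return the parsed model (via `_response.data`), while the raw clients defined in this file return an `HttpResponse` wrapper around that model. A minimal usage sketch, assuming the synchronous `RecordsClient` exposes the same `with_raw_response` property that the async client declares; vault, table, and record values are placeholders:

from skyflow import Skyflow, V1BatchRecord

client = Skyflow(token="YOUR_TOKEN")

# High-level call: returns the parsed V1BatchOperationResponse directly.
result = client.records.record_service_batch_operation(
    vault_id="vaultID",
    records=[
        V1BatchRecord(
            table_name="persons",
            method="GET",
            batch_id="persons-12345",
            redaction="PLAIN_TEXT",
            id="f1dbc55c-7c9b-495d-9a36-72bb2b619202",
        )
    ],
    continue_on_error=True,
)

# Raw call (assumed sync counterpart of the async with_raw_response property):
# returns HttpResponse[V1BatchOperationResponse]; .data holds the parsed model.
raw = client.records.with_raw_response.record_service_batch_operation(
    vault_id="vaultID", records=[], continue_on_error=True
)
print(raw.data)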
+ """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" + ), + "continueOnError": continue_on_error, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BatchOperationResponse, + parse_obj_as( + type_=V1BatchOperationResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_bulk_get_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + redaction: typing.Optional[RecordServiceBulkGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + offset: typing.Optional[str] = None, + limit: typing.Optional[str] = None, + download_url: typing.Optional[bool] = None, + column_name: typing.Optional[str] = None, + column_values: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + order_by: typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1BulkGetRecordResponse]: + """ + Gets the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table that contains the records. + + skyflow_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]] + `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

          If not specified, returns the first 25 records in the table. + + redaction : typing.Optional[RecordServiceBulkGetRecordRequestRedaction] + Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + offset : typing.Optional[str] + Record position at which to start receiving data. + + limit : typing.Optional[str] + Number of records to return. Maximum 25. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + column_name : typing.Optional[str] + Name of the column. It must be configured as unique in the schema. If you provide a column name or column values, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + column_values : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

          `column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + order_by : typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] + Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1BulkGetRecordResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="GET", + params={ + "skyflow_ids": skyflow_ids, + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "offset": offset, + "limit": limit, + "downloadURL": download_url, + "column_name": column_name, + "column_values": column_values, + "order_by": order_by, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BulkGetRecordResponse, + parse_obj_as( + type_=V1BulkGetRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_insert_record( + self, + vault_id: str, + object_name: str, + *, + records: typing.Optional[typing.Sequence[V1FieldRecords]] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + upsert: typing.Optional[str] = OMIT, + homogeneous: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1InsertRecordResponse]: + """ + Inserts a record in the specified table.

          The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

          Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + records : typing.Optional[typing.Sequence[V1FieldRecords]] + Record values and tokens. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + upsert : typing.Optional[str] + Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.
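A quick illustration of the `upsert` parameter described here, using the high-level records client (a sketch; vault, table, and column names are placeholders):

from skyflow import Skyflow, V1FieldRecords

client = Skyflow(token="YOUR_TOKEN")

# If a record with this drivers_license_number already exists it is updated;
# otherwise a new record is inserted.
client.records.record_service_insert_record(
    vault_id="vaultID",
    object_name="persons",
    records=[
        V1FieldRecords(fields={"drivers_license_number": "13456789", "name": "John"})
    ],
    upsert="drivers_license_number",
    tokenization=True,
)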

          When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed. + + homogeneous : typing.Optional[bool] + If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1InsertRecordResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" + ), + "tokenization": tokenization, + "upsert": upsert, + "homogeneous": homogeneous, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1InsertRecordResponse, + parse_obj_as( + type_=V1InsertRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_bulk_delete_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1BulkDeleteRecordResponse]: + """ + Deletes the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + skyflow_ids : typing.Optional[typing.Sequence[str]] + `skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1BulkDeleteRecordResponse] + A successful response. 
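A short sketch of the bulk delete call described above, using the high-level records client (IDs and names are placeholders):

from skyflow import Skyflow

client = Skyflow(token="YOUR_TOKEN")

# Delete two specific records by skyflow_id.
client.records.record_service_bulk_delete_record(
    vault_id="vaultID",
    object_name="persons",
    skyflow_ids=[
        "51782ea4-91a5-4430-a06d-f4b76efd3d2f",
        "110ce08f-6059-4874-b1ae-7c6651d286ff",
    ],
)

# Per the parameter description, "*" deletes every record in the table.
client.records.record_service_bulk_delete_record(
    vault_id="vaultID", object_name="persons", skyflow_ids=["*"]
)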
+ """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="DELETE", + json={ + "skyflow_ids": skyflow_ids, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BulkDeleteRecordResponse, + parse_obj_as( + type_=V1BulkDeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_get_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + redaction: typing.Optional[RecordServiceGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + download_url: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1FieldRecords]: + """ + Returns the specified record from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + redaction : typing.Optional[RecordServiceGetRecordRequestRedaction] + Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1FieldRecords] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="GET", + params={ + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "downloadURL": download_url, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1FieldRecords, + parse_obj_as( + type_=V1FieldRecords, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_update_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + record: typing.Optional[V1FieldRecords] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1UpdateRecordResponse]: + """ + Updates the specified record in a table.

          When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.
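A sketch of an update that follows this rule, sending the complete contents for each field being changed (high-level client; placeholder values):

from skyflow import Skyflow, V1FieldRecords

client = Skyflow(token="YOUR_TOKEN")

# Send the entire value you want stored; for JSON fields, nested keys left out
# of this payload are removed from the stored value.
client.records.record_service_update_record(
    vault_id="vaultID",
    object_name="persons",
    id="f1dbc55c-7c9b-495d-9a36-72bb2b619202",
    record=V1FieldRecords(
        fields={"name": "Steve Smith", "phone_number": "8794523160"}
    ),
    tokenization=True,
)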

          The time-to-live (TTL) for a transient field resets when the field value is updated. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + record : typing.Optional[V1FieldRecords] + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1UpdateRecordResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="PUT", + json={ + "record": convert_and_respect_annotation_metadata( + object_=record, annotation=V1FieldRecords, direction="write" + ), + "tokenization": tokenization, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_delete_record( + self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[V1DeleteRecordResponse]: + """ + Deletes the specified record from a table.

          Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record to delete. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DeleteRecordResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DeleteRecordResponse, + parse_obj_as( + type_=V1DeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def file_service_upload_file( + self, + vault_id: str, + object_name: str, + id: str, + *, + file_column_name: typing.Optional[core.File] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1UpdateRecordResponse]: + """ + Uploads a file to the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + file_column_name : typing.Optional[core.File] + See core.File for more documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1UpdateRecordResponse] + A successful response. 
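A sketch of the upload call, assuming `core.File` accepts an open binary file object (typical for this style of generated client; the path, table, and column are placeholders):

from skyflow import Skyflow

client = Skyflow(token="YOUR_TOKEN")

# The file is sent as the multipart part named "fileColumnName".
with open("statement.pdf", "rb") as f:
    client.records.file_service_upload_file(
        vault_id="vaultID",
        object_name="persons",
        id="f1dbc55c-7c9b-495d-9a36-72bb2b619202",
        file_column_name=f,
    )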
+ """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", + method="POST", + data={}, + files={ + **({"fileColumnName": file_column_name} if fileColumnName is not None else {}), + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def file_service_delete_file( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1DeleteFileResponse]: + """ + Deletes a file from the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DeleteFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DeleteFileResponse, + parse_obj_as( + type_=V1DeleteFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def file_service_get_file_scan_status( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1GetFileScanStatusResponse]: + """ + Returns the anti-virus scan status of a file. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1GetFileScanStatusResponse] + A successful response. 
+ """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetFileScanStatusResponse, + parse_obj_as( + type_=V1GetFileScanStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawRecordsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def record_service_batch_operation( + self, + vault_id: str, + *, + records: typing.Optional[typing.Sequence[V1BatchRecord]] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1BatchOperationResponse]: + """ + Performs multiple record operations in a single transaction. + + Parameters + ---------- + vault_id : str + ID of the vault. + + records : typing.Optional[typing.Sequence[V1BatchRecord]] + Record operations to perform. + + continue_on_error : typing.Optional[bool] + Continue performing operations on partial errors. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1BatchOperationResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1BatchRecord], direction="write" + ), + "continueOnError": continue_on_error, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BatchOperationResponse, + parse_obj_as( + type_=V1BatchOperationResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_bulk_get_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + redaction: typing.Optional[RecordServiceBulkGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + offset: typing.Optional[str] = None, + limit: typing.Optional[str] = None, + download_url: typing.Optional[bool] = None, + column_name: typing.Optional[str] = None, + column_values: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + order_by: typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1BulkGetRecordResponse]: + """ + Gets the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table that contains the records. + + skyflow_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]] + `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

          If not specified, returns the first 25 records in the table. + + redaction : typing.Optional[RecordServiceBulkGetRecordRequestRedaction] + Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + offset : typing.Optional[str] + Record position at which to start receiving data. + + limit : typing.Optional[str] + Number of records to return. Maximum 25. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + column_name : typing.Optional[str] + Name of the column. It must be configured as unique in the schema. If you provide a column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + column_values : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

          `column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + + order_by : typing.Optional[RecordServiceBulkGetRecordRequestOrderBy] + Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1BulkGetRecordResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="GET", + params={ + "skyflow_ids": skyflow_ids, + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "offset": offset, + "limit": limit, + "downloadURL": download_url, + "column_name": column_name, + "column_values": column_values, + "order_by": order_by, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BulkGetRecordResponse, + parse_obj_as( + type_=V1BulkGetRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_insert_record( + self, + vault_id: str, + object_name: str, + *, + records: typing.Optional[typing.Sequence[V1FieldRecords]] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + upsert: typing.Optional[str] = OMIT, + homogeneous: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1InsertRecordResponse]: + """ + Inserts a record in the specified table.

          The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

          Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + records : typing.Optional[typing.Sequence[V1FieldRecords]] + Record values and tokens. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + upsert : typing.Optional[str] + Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.

          When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed. + + homogeneous : typing.Optional[bool] + If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1InsertRecordResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="POST", + json={ + "records": convert_and_respect_annotation_metadata( + object_=records, annotation=typing.Sequence[V1FieldRecords], direction="write" + ), + "tokenization": tokenization, + "upsert": upsert, + "homogeneous": homogeneous, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1InsertRecordResponse, + parse_obj_as( + type_=V1InsertRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_bulk_delete_record( + self, + vault_id: str, + object_name: str, + *, + skyflow_ids: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1BulkDeleteRecordResponse]: + """ + Deletes the specified records from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + skyflow_ids : typing.Optional[typing.Sequence[str]] + `skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1BulkDeleteRecordResponse] + A successful response. 
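A hedged usage sketch, not part of the patch, for the bulk delete described above; it keeps the same client.records / with_raw_response assumption, and the table name and IDs are placeholders.

import asyncio

from skyflow import AsyncSkyflow

async def main() -> None:
    client = AsyncSkyflow(token="YOUR_TOKEN")
    response = await client.records.with_raw_response.record_service_bulk_delete_record(
        vault_id="vaultID",
        object_name="persons",         # hypothetical table name
        skyflow_ids=["id-1", "id-2"],  # pass ["*"] to delete every record in the table
    )
    print(response.data)

asyncio.run(main())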
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}", + method="DELETE", + json={ + "skyflow_ids": skyflow_ids, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1BulkDeleteRecordResponse, + parse_obj_as( + type_=V1BulkDeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_get_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + redaction: typing.Optional[RecordServiceGetRecordRequestRedaction] = None, + tokenization: typing.Optional[bool] = None, + fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + download_url: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1FieldRecords]: + """ + Returns the specified record from a table. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + redaction : typing.Optional[RecordServiceGetRecordRequestRedaction] + Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + + fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

          If not specified, returns all fields. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1FieldRecords] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="GET", + params={ + "redaction": redaction, + "tokenization": tokenization, + "fields": fields, + "downloadURL": download_url, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1FieldRecords, + parse_obj_as( + type_=V1FieldRecords, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_update_record( + self, + vault_id: str, + object_name: str, + id: str, + *, + record: typing.Optional[V1FieldRecords] = OMIT, + tokenization: typing.Optional[bool] = OMIT, + byot: typing.Optional[V1Byot] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1UpdateRecordResponse]: + """ + Updates the specified record in a table.

          When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

          The time-to-live (TTL) for a transient field resets when the field value is updated. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + record : typing.Optional[V1FieldRecords] + + tokenization : typing.Optional[bool] + If `true`, this operation returns tokens for fields with tokenization enabled. + + byot : typing.Optional[V1Byot] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1UpdateRecordResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="PUT", + json={ + "record": convert_and_respect_annotation_metadata( + object_=record, annotation=V1FieldRecords, direction="write" + ), + "tokenization": tokenization, + "byot": byot, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_delete_record( + self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[V1DeleteRecordResponse]: + """ + Deletes the specified record from a table.

          Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record to delete. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DeleteRecordResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DeleteRecordResponse, + parse_obj_as( + type_=V1DeleteRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def file_service_upload_file( + self, + vault_id: str, + object_name: str, + id: str, + *, + file_column_name: typing.Optional[core.File] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1UpdateRecordResponse]: + """ + Uploads a file to the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + object_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + file_column_name : typing.Optional[core.File] + See core.File for more documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1UpdateRecordResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", + method="POST", + data={}, + files={ + **({"fileColumnName": file_column_name} if fileColumnName is not None else {}), + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1UpdateRecordResponse, + parse_obj_as( + type_=V1UpdateRecordResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def file_service_delete_file( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1DeleteFileResponse]: + """ + Deletes a file from the specified record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DeleteFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DeleteFileResponse, + parse_obj_as( + type_=V1DeleteFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def file_service_get_file_scan_status( + self, + vault_id: str, + table_name: str, + id: str, + column_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1GetFileScanStatusResponse]: + """ + Returns the anti-virus scan status of a file. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table. + + id : str + `skyflow_id` of the record. + + column_name : str + Name of the column that contains the file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1GetFileScanStatusResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(table_name)}/{jsonable_encoder(id)}/files/{jsonable_encoder(column_name)}/scan-status", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1GetFileScanStatusResponse, + parse_obj_as( + type_=V1GetFileScanStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/records/types/__init__.py b/skyflow/generated/rest/records/types/__init__.py index 9e9ce24e..62f7c5c2 100644 --- a/skyflow/generated/rest/records/types/__init__.py +++ b/skyflow/generated/rest/records/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .record_service_bulk_get_record_request_order_by import RecordServiceBulkGetRecordRequestOrderBy from .record_service_bulk_get_record_request_redaction import RecordServiceBulkGetRecordRequestRedaction from .record_service_get_record_request_redaction import RecordServiceGetRecordRequestRedaction diff --git a/skyflow/generated/rest/tokens/__init__.py b/skyflow/generated/rest/tokens/__init__.py index f3ea2659..5cde0202 100644 --- a/skyflow/generated/rest/tokens/__init__.py +++ b/skyflow/generated/rest/tokens/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/skyflow/generated/rest/tokens/client.py b/skyflow/generated/rest/tokens/client.py index 641050fe..d7861277 100644 --- a/skyflow/generated/rest/tokens/client.py +++ b/skyflow/generated/rest/tokens/client.py @@ -1,19 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from ..types.v_1_detokenize_record_request import V1DetokenizeRecordRequest + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from ..types.v_1_detokenize_record_request import V1DetokenizeRecordRequest from ..types.v_1_detokenize_response import V1DetokenizeResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.pydantic_utilities import parse_obj_as -from ..errors.not_found_error import NotFoundError -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError from ..types.v_1_tokenize_record_request import V1TokenizeRecordRequest from ..types.v_1_tokenize_response import V1TokenizeResponse -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawTokensClient, RawTokensClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -21,7 +16,18 @@ class TokensClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTokensClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawTokensClient + """ + return self._raw_client def record_service_detokenize( self, @@ -59,67 +65,19 @@ def record_service_detokenize( Examples -------- - from skyflow import Skyflow, V1DetokenizeRecordRequest - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.tokens.record_service_detokenize( - vault_id="vaultID", - detokenization_parameters=[ - V1DetokenizeRecordRequest( - token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", - redaction="PLAIN_TEXT", - ), - V1DetokenizeRecordRequest( - token="05383487-fcae-42e5-a48e-5bd62a51af12", - redaction="DEFAULT", - ), - ], - download_url=False, - ) + from skyflow import Skyflow + from skyflow import V1DetokenizeRecordRequest + client = Skyflow(token="YOUR_TOKEN", ) + client.tokens.record_service_detokenize(vault_id='vaultID', detokenization_parameters=[V1DetokenizeRecordRequest(token='afbd1074-51c1-4a16-9eee-e2c0ecb52125', redaction="PLAIN_TEXT", ), V1DetokenizeRecordRequest(token='05383487-fcae-42e5-a48e-5bd62a51af12', redaction="DEFAULT", )], download_url=False, ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", - method="POST", - json={ - "detokenizationParameters": convert_and_respect_annotation_metadata( - object_=detokenization_parameters, - annotation=typing.Sequence[V1DetokenizeRecordRequest], - direction="write", - ), - "downloadURL": download_url, - "continueOnError": continue_on_error, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.record_service_detokenize( + vault_id, + detokenization_parameters=detokenization_parameters, + download_url=download_url, + continue_on_error=continue_on_error, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DetokenizeResponse, - parse_obj_as( - type_=V1DetokenizeResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def record_service_tokenize( self, @@ -150,58 +108,29 @@ def record_service_tokenize( Examples -------- from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.tokens.record_service_tokenize( - vault_id="vaultID", - ) + client = Skyflow(token="YOUR_TOKEN", ) + client.tokens.record_service_tokenize(vault_id='vaultID', ) """ - _response = self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", - method="POST", - json={ - "tokenizationParameters": convert_and_respect_annotation_metadata( - object_=tokenization_parameters, - annotation=typing.Sequence[V1TokenizeRecordRequest], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - 
omit=OMIT, + _response = self._raw_client.record_service_tokenize( + vault_id, tokenization_parameters=tokenization_parameters, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1TokenizeResponse, - parse_obj_as( - type_=V1TokenizeResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncTokensClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTokensClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawTokensClient + """ + return self._raw_client async def record_service_detokenize( self, @@ -239,75 +168,22 @@ async def record_service_detokenize( Examples -------- + from skyflow import AsyncSkyflow + from skyflow import V1DetokenizeRecordRequest import asyncio - - from skyflow import AsyncSkyflow, V1DetokenizeRecordRequest - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.tokens.record_service_detokenize( - vault_id="vaultID", - detokenization_parameters=[ - V1DetokenizeRecordRequest( - token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", - redaction="PLAIN_TEXT", - ), - V1DetokenizeRecordRequest( - token="05383487-fcae-42e5-a48e-5bd62a51af12", - redaction="DEFAULT", - ), - ], - download_url=False, - ) - - + await client.tokens.record_service_detokenize(vault_id='vaultID', detokenization_parameters=[V1DetokenizeRecordRequest(token='afbd1074-51c1-4a16-9eee-e2c0ecb52125', redaction="PLAIN_TEXT", ), V1DetokenizeRecordRequest(token='05383487-fcae-42e5-a48e-5bd62a51af12', redaction="DEFAULT", )], download_url=False, ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", - method="POST", - json={ - "detokenizationParameters": convert_and_respect_annotation_metadata( - object_=detokenization_parameters, - annotation=typing.Sequence[V1DetokenizeRecordRequest], - direction="write", - ), - "downloadURL": download_url, - "continueOnError": continue_on_error, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.record_service_detokenize( + vault_id, + detokenization_parameters=detokenization_parameters, + download_url=download_url, + continue_on_error=continue_on_error, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1DetokenizeResponse, - parse_obj_as( - type_=V1DetokenizeResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def record_service_tokenize( self, @@ -337,59 +213,14 @@ async def record_service_tokenize( Examples -------- - import asyncio - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - + import asyncio + client = AsyncSkyflow(token="YOUR_TOKEN", ) async def main() -> None: - await client.tokens.record_service_tokenize( - vault_id="vaultID", - ) - - + await client.tokens.record_service_tokenize(vault_id='vaultID', ) asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", - method="POST", - json={ - "tokenizationParameters": convert_and_respect_annotation_metadata( - object_=tokenization_parameters, - annotation=typing.Sequence[V1TokenizeRecordRequest], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.record_service_tokenize( + vault_id, tokenization_parameters=tokenization_parameters, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - V1TokenizeResponse, - parse_obj_as( - type_=V1TokenizeResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], - parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/skyflow/generated/rest/tokens/raw_client.py b/skyflow/generated/rest/tokens/raw_client.py new file mode 100644 index 00000000..58dfa94d --- /dev/null +++ b/skyflow/generated/rest/tokens/raw_client.py @@ -0,0 +1,318 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.not_found_error import NotFoundError +from ..types.v_1_detokenize_record_request import V1DetokenizeRecordRequest +from ..types.v_1_detokenize_response import V1DetokenizeResponse +from ..types.v_1_tokenize_record_request import V1TokenizeRecordRequest +from ..types.v_1_tokenize_response import V1TokenizeResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
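Before the raw client classes below, a hedged sketch (not part of the patch) of reaching them through the with_raw_response property added to TokensClient earlier in this diff; the vault ID, token, and redaction values mirror the generated examples above.

from skyflow import Skyflow, V1DetokenizeRecordRequest

client = Skyflow(token="YOUR_TOKEN")
raw = client.tokens.with_raw_response.record_service_detokenize(
    vault_id="vaultID",
    detokenization_parameters=[
        V1DetokenizeRecordRequest(
            token="afbd1074-51c1-4a16-9eee-e2c0ecb52125",
            redaction="PLAIN_TEXT",
        ),
    ],
    download_url=False,
)
# The wrapped TokensClient returns raw.data directly; keeping the HttpResponse
# wrapper is the only difference when going through with_raw_response.
print(raw.data)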
+ + +class RawTokensClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def record_service_detokenize( + self, + vault_id: str, + *, + detokenization_parameters: typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] = OMIT, + download_url: typing.Optional[bool] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1DetokenizeResponse]: + """ + Returns records that correspond to the specified tokens. + + Parameters + ---------- + vault_id : str + ID of the vault. + + detokenization_parameters : typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] + Detokenization details. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + continue_on_error : typing.Optional[bool] + If `true`, the detokenization request continues even if an error occurs. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DetokenizeResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", + method="POST", + json={ + "detokenizationParameters": convert_and_respect_annotation_metadata( + object_=detokenization_parameters, + annotation=typing.Sequence[V1DetokenizeRecordRequest], + direction="write", + ), + "downloadURL": download_url, + "continueOnError": continue_on_error, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetokenizeResponse, + parse_obj_as( + type_=V1DetokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + def record_service_tokenize( + self, + vault_id: str, + *, + tokenization_parameters: typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1TokenizeResponse]: + """ + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

          Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + Parameters + ---------- + vault_id : str + ID of the vault. + + tokenization_parameters : typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] + Tokenization details. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1TokenizeResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", + method="POST", + json={ + "tokenizationParameters": convert_and_respect_annotation_metadata( + object_=tokenization_parameters, + annotation=typing.Sequence[V1TokenizeRecordRequest], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1TokenizeResponse, + parse_obj_as( + type_=V1TokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + +class AsyncRawTokensClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def record_service_detokenize( + self, + vault_id: str, + *, + detokenization_parameters: typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] = OMIT, + download_url: typing.Optional[bool] = OMIT, + continue_on_error: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1DetokenizeResponse]: + """ + Returns records that correspond to the specified tokens. + + Parameters + ---------- + vault_id : str + ID of the vault. + + detokenization_parameters : typing.Optional[typing.Sequence[V1DetokenizeRecordRequest]] + Detokenization details. + + download_url : typing.Optional[bool] + If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + + continue_on_error : typing.Optional[bool] + If `true`, the detokenization request continues even if an error occurs. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DetokenizeResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/detokenize", + method="POST", + json={ + "detokenizationParameters": convert_and_respect_annotation_metadata( + object_=detokenization_parameters, + annotation=typing.Sequence[V1DetokenizeRecordRequest], + direction="write", + ), + "downloadURL": download_url, + "continueOnError": continue_on_error, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetokenizeResponse, + parse_obj_as( + type_=V1DetokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + + async def record_service_tokenize( + self, + vault_id: str, + *, + tokenization_parameters: typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1TokenizeResponse]: + """ + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

          Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + Parameters + ---------- + vault_id : str + ID of the vault. + + tokenization_parameters : typing.Optional[typing.Sequence[V1TokenizeRecordRequest]] + Tokenization details. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1TokenizeResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/vaults/{jsonable_encoder(vault_id)}/tokenize", + method="POST", + json={ + "tokenizationParameters": convert_and_respect_annotation_metadata( + object_=tokenization_parameters, + annotation=typing.Sequence[V1TokenizeRecordRequest], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1TokenizeResponse, + parse_obj_as( + type_=V1TokenizeResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Dict[str, typing.Optional[typing.Any]], + parse_obj_as( + type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) + raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index d2112008..093756ee 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .audit_event_audit_resource_type import AuditEventAuditResourceType from .audit_event_context import AuditEventContext from .audit_event_data import AuditEventData diff --git a/skyflow/generated/rest/types/audit_event_context.py b/skyflow/generated/rest/types/audit_event_context.py index 178137ec..92287d5a 100644 --- a/skyflow/generated/rest/types/audit_event_context.py +++ b/skyflow/generated/rest/types/audit_event_context.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from .v_1_member_type import V1MemberType +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata from .context_access_type import ContextAccessType from .context_auth_mode import ContextAuthMode -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .v_1_member_type import V1MemberType class AuditEventContext(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/audit_event_data.py b/skyflow/generated/rest/types/audit_event_data.py index 78385d17..9449de24 100644 --- a/skyflow/generated/rest/types/audit_event_data.py +++ b/skyflow/generated/rest/types/audit_event_data.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AuditEventData(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/audit_event_http_info.py b/skyflow/generated/rest/types/audit_event_http_info.py index 14df874b..093119e3 100644 --- a/skyflow/generated/rest/types/audit_event_http_info.py +++ b/skyflow/generated/rest/types/audit_event_http_info.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class AuditEventHttpInfo(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/googlerpc_status.py b/skyflow/generated/rest/types/googlerpc_status.py index aceede7e..f0a885b4 100644 --- a/skyflow/generated/rest/types/googlerpc_status.py +++ b/skyflow/generated/rest/types/googlerpc_status.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .protobuf_any import ProtobufAny -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .protobuf_any import ProtobufAny class GooglerpcStatus(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/protobuf_any.py b/skyflow/generated/rest/types/protobuf_any.py index 9d141254..9062870c 100644 --- a/skyflow/generated/rest/types/protobuf_any.py +++ b/skyflow/generated/rest/types/protobuf_any.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class ProtobufAny(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_audit_after_options.py b/skyflow/generated/rest/types/v_1_audit_after_options.py index 0f078667..6d10157e 100644 --- a/skyflow/generated/rest/types/v_1_audit_after_options.py +++ b/skyflow/generated/rest/types/v_1_audit_after_options.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1AuditAfterOptions(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_audit_event_response.py b/skyflow/generated/rest/types/v_1_audit_event_response.py index 2ff30533..d82e8999 100644 --- a/skyflow/generated/rest/types/v_1_audit_event_response.py +++ b/skyflow/generated/rest/types/v_1_audit_event_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_event_data import AuditEventData -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1AuditEventResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_audit_response.py b/skyflow/generated/rest/types/v_1_audit_response.py index 617c1fd9..fe0b358a 100644 --- a/skyflow/generated/rest/types/v_1_audit_response.py +++ b/skyflow/generated/rest/types/v_1_audit_response.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_audit_response_event import V1AuditResponseEvent + import pydantic import typing_extensions -from .v_1_audit_after_options import V1AuditAfterOptions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .v_1_audit_after_options import V1AuditAfterOptions +from .v_1_audit_response_event import V1AuditResponseEvent class V1AuditResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_audit_response_event.py b/skyflow/generated/rest/types/v_1_audit_response_event.py index b623257e..dbea54a2 100644 --- a/skyflow/generated/rest/types/v_1_audit_response_event.py +++ b/skyflow/generated/rest/types/v_1_audit_response_event.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .audit_event_context import AuditEventContext -from .v_1_audit_response_event_request import V1AuditResponseEventRequest -from .v_1_audit_event_response import V1AuditEventResponse + +import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .audit_event_context import AuditEventContext +from .v_1_audit_event_response import V1AuditEventResponse +from .v_1_audit_response_event_request import V1AuditResponseEventRequest class V1AuditResponseEvent(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_audit_response_event_request.py b/skyflow/generated/rest/types/v_1_audit_response_event_request.py index 5eb9a709..6127818d 100644 --- a/skyflow/generated/rest/types/v_1_audit_response_event_request.py +++ b/skyflow/generated/rest/types/v_1_audit_response_event_request.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from .audit_event_data import AuditEventData + +import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -import pydantic -from .request_action_type import RequestActionType from .audit_event_audit_resource_type import AuditEventAuditResourceType +from .audit_event_data import AuditEventData from .audit_event_http_info import AuditEventHttpInfo -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .request_action_type import RequestActionType class V1AuditResponseEventRequest(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_batch_operation_response.py b/skyflow/generated/rest/types/v_1_batch_operation_response.py index 72643ce2..9e7144a6 100644 --- a/skyflow/generated/rest/types/v_1_batch_operation_response.py +++ b/skyflow/generated/rest/types/v_1_batch_operation_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class V1BatchOperationResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_batch_record.py b/skyflow/generated/rest/types/v_1_batch_record.py index 7dca5fda..d531fc5e 100644 --- a/skyflow/generated/rest/types/v_1_batch_record.py +++ b/skyflow/generated/rest/types/v_1_batch_record.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata from .batch_record_method import BatchRecordMethod from .redaction_enum_redaction import RedactionEnumRedaction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1BatchRecord(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_bin_list_response.py b/skyflow/generated/rest/types/v_1_bin_list_response.py index bd4f69b9..24d7fad7 100644 --- a/skyflow/generated/rest/types/v_1_bin_list_response.py +++ b/skyflow/generated/rest/types/v_1_bin_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_card import V1Card + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_card import V1Card class V1BinListResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py b/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py index 6d03bccd..bae86dc3 100644 --- a/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py +++ b/skyflow/generated/rest/types/v_1_bulk_delete_record_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class V1BulkDeleteRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_bulk_get_record_response.py b/skyflow/generated/rest/types/v_1_bulk_get_record_response.py index 7244bc7f..385a8132 100644 --- a/skyflow/generated/rest/types/v_1_bulk_get_record_response.py +++ b/skyflow/generated/rest/types/v_1_bulk_get_record_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_field_records import V1FieldRecords + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_field_records import V1FieldRecords class V1BulkGetRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_card.py b/skyflow/generated/rest/types/v_1_card.py index c5a641b1..1c862169 100644 --- a/skyflow/generated/rest/types/v_1_card.py +++ b/skyflow/generated/rest/types/v_1_card.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class V1Card(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_delete_file_response.py b/skyflow/generated/rest/types/v_1_delete_file_response.py index 6e995cec..927517e9 100644 --- a/skyflow/generated/rest/types/v_1_delete_file_response.py +++ b/skyflow/generated/rest/types/v_1_delete_file_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1DeleteFileResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_delete_record_response.py b/skyflow/generated/rest/types/v_1_delete_record_response.py index 366cb30b..4007abca 100644 --- a/skyflow/generated/rest/types/v_1_delete_record_response.py +++ b/skyflow/generated/rest/types/v_1_delete_record_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1DeleteRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_detokenize_record_request.py b/skyflow/generated/rest/types/v_1_detokenize_record_request.py index b6e225c3..ef75d346 100644 --- a/skyflow/generated/rest/types/v_1_detokenize_record_request.py +++ b/skyflow/generated/rest/types/v_1_detokenize_record_request.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .redaction_enum_redaction import RedactionEnumRedaction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1DetokenizeRecordRequest(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_detokenize_record_response.py b/skyflow/generated/rest/types/v_1_detokenize_record_response.py index bbc26aa0..4cce821c 100644 --- a/skyflow/generated/rest/types/v_1_detokenize_record_response.py +++ b/skyflow/generated/rest/types/v_1_detokenize_record_response.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions -from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType class V1DetokenizeRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_detokenize_response.py b/skyflow/generated/rest/types/v_1_detokenize_response.py index 63e97c84..34759550 100644 --- a/skyflow/generated/rest/types/v_1_detokenize_response.py +++ b/skyflow/generated/rest/types/v_1_detokenize_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_detokenize_record_response import V1DetokenizeRecordResponse + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_detokenize_record_response import V1DetokenizeRecordResponse class V1DetokenizeResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_field_records.py b/skyflow/generated/rest/types/v_1_field_records.py index 07a8bf58..fbedfcfe 100644 --- a/skyflow/generated/rest/types/v_1_field_records.py +++ b/skyflow/generated/rest/types/v_1_field_records.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1FieldRecords(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_get_auth_token_response.py b/skyflow/generated/rest/types/v_1_get_auth_token_response.py index d414ed7c..c4db65a0 100644 --- a/skyflow/generated/rest/types/v_1_get_auth_token_response.py +++ b/skyflow/generated/rest/types/v_1_get_auth_token_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ..core.serialization import FieldMetadata + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata class V1GetAuthTokenResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py b/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py index 71349961..70801b56 100644 --- a/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py +++ b/skyflow/generated/rest/types/v_1_get_file_scan_status_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_file_av_scan_status import V1FileAvScanStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_file_av_scan_status import V1FileAvScanStatus class V1GetFileScanStatusResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_get_query_response.py b/skyflow/generated/rest/types/v_1_get_query_response.py index 778a517a..0edf0513 100644 --- a/skyflow/generated/rest/types/v_1_get_query_response.py +++ b/skyflow/generated/rest/types/v_1_get_query_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_field_records import V1FieldRecords + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_field_records import V1FieldRecords class V1GetQueryResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_insert_record_response.py b/skyflow/generated/rest/types/v_1_insert_record_response.py index a3344c92..d303a2f9 100644 --- a/skyflow/generated/rest/types/v_1_insert_record_response.py +++ b/skyflow/generated/rest/types/v_1_insert_record_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_record_meta_properties import V1RecordMetaProperties + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_record_meta_properties import V1RecordMetaProperties class V1InsertRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_record_meta_properties.py b/skyflow/generated/rest/types/v_1_record_meta_properties.py index a4eb95b7..bc51f5c5 100644 --- a/skyflow/generated/rest/types/v_1_record_meta_properties.py +++ b/skyflow/generated/rest/types/v_1_record_meta_properties.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1RecordMetaProperties(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_tokenize_record_request.py b/skyflow/generated/rest/types/v_1_tokenize_record_request.py index 9fba53a2..1801aeb7 100644 --- a/skyflow/generated/rest/types/v_1_tokenize_record_request.py +++ b/skyflow/generated/rest/types/v_1_tokenize_record_request.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1TokenizeRecordRequest(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_tokenize_record_response.py b/skyflow/generated/rest/types/v_1_tokenize_record_response.py index c105e9fc..fd449283 100644 --- a/skyflow/generated/rest/types/v_1_tokenize_record_response.py +++ b/skyflow/generated/rest/types/v_1_tokenize_record_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1TokenizeRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_tokenize_response.py b/skyflow/generated/rest/types/v_1_tokenize_response.py index 0e1886b4..f882cc24 100644 --- a/skyflow/generated/rest/types/v_1_tokenize_response.py +++ b/skyflow/generated/rest/types/v_1_tokenize_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .v_1_tokenize_record_response import V1TokenizeRecordResponse + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_tokenize_record_response import V1TokenizeRecordResponse class V1TokenizeResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_update_record_response.py b/skyflow/generated/rest/types/v_1_update_record_response.py index be6da8fb..c17a3933 100644 --- a/skyflow/generated/rest/types/v_1_update_record_response.py +++ b/skyflow/generated/rest/types/v_1_update_record_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1UpdateRecordResponse(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_vault_field_mapping.py b/skyflow/generated/rest/types/v_1_vault_field_mapping.py index a567d639..c681b94d 100644 --- a/skyflow/generated/rest/types/v_1_vault_field_mapping.py +++ b/skyflow/generated/rest/types/v_1_vault_field_mapping.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class V1VaultFieldMapping(UniversalBaseModel): diff --git a/skyflow/generated/rest/types/v_1_vault_schema_config.py b/skyflow/generated/rest/types/v_1_vault_schema_config.py index a3f3f0b6..b61e30ea 100644 --- a/skyflow/generated/rest/types/v_1_vault_schema_config.py +++ b/skyflow/generated/rest/types/v_1_vault_schema_config.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .v_1_vault_field_mapping import V1VaultFieldMapping -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class V1VaultSchemaConfig(UniversalBaseModel): diff --git a/skyflow/generated/rest/version.py b/skyflow/generated/rest/version.py index f8d02ff4..5a6bc65e 100644 --- a/skyflow/generated/rest/version.py +++ b/skyflow/generated/rest/version.py @@ -1 +1 @@ -__version__ = '2.0.0b1.dev0+3d4ee51' +__version__ = "2.0.0" \ No newline at end of file From 463ffc263c8fab4a85fb6ef7d16a57dfb9b04a48 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Fri, 2 May 2025 20:27:08 +0530 Subject: [PATCH 22/60] SK-1909 Handle API error cases after fern migration --- skyflow/utils/_utils.py | 30 +++++++++++++---- skyflow/vault/controller/_vault.py | 4 +-- tests/utils/test__utils.py | 22 ++++++------ tests/vault/controller/test__vault.py | 48 +++++++++++++++------------ 4 files changed, 63 insertions(+), 41 deletions(-) diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 8f035e93..13556af1 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -13,6 +13,7 @@ from skyflow.error import SkyflowError from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse +from skyflow.generated.rest.core.http_response import HttpResponse from skyflow.utils.logger import log_error_log from . 
import SkyflowMessages, SDK_VERSION from .constants import PROTOCOL @@ -192,11 +193,16 @@ def get_metrics(): def parse_insert_response(api_response, continue_on_error): + # Retrieve the headers and data from the API response + api_response_headers = api_response.headers + api_response_data = api_response.data + # Retrieve the request ID from the headers + request_id = api_response_headers.get('x-request-id') inserted_fields = [] errors = [] insert_response = InsertResponse() if continue_on_error: - for idx, response in enumerate(api_response.responses): + for idx, response in enumerate(api_response_data.responses): if response['Status'] == 200: body = response['Body'] if 'records' in body: @@ -212,6 +218,7 @@ def parse_insert_response(api_response, continue_on_error): elif response['Status'] == 400: error = { 'request_index': idx, + 'request_id': request_id, 'error': response['Body']['error'] } errors.append(error) @@ -220,7 +227,7 @@ def parse_insert_response(api_response, continue_on_error): insert_response.errors = errors else: - for record in api_response.records: + for record in api_response_data.records: field_data = { 'skyflow_id': record.skyflow_id } @@ -265,18 +272,24 @@ def parse_get_response(api_response: V1BulkGetRecordResponse): return get_response -def parse_detokenize_response(api_response: V1DetokenizeResponse): +def parse_detokenize_response(api_response: HttpResponse[V1DetokenizeResponse]): + # Retrieve the headers and data from the API response + api_response_headers = api_response.headers + api_response_data = api_response.data + # Retrieve the request ID from the headers + request_id = api_response_headers.get('x-request-id') detokenized_fields = [] errors = [] - for record in api_response.records: + for record in api_response_data.records: if record.error: errors.append({ "token": record.token, - "error": record.error + "error": record.error, + "request_id": request_id }) else: - value_type = record.value_type.value if record.value_type else None + value_type = record.value_type if record.value_type else None detokenized_fields.append({ "token": record.token, "value": record.value, @@ -372,7 +385,10 @@ def handle_exception(error, logger): def handle_json_error(err, data, request_id, logger): try: - description = json.loads(data) + if isinstance(data, dict): # If data is already a dict + description = data + else: + description = json.loads(data) status_code = description.get('error', {}).get('http_code', 500) # Default to 500 if not found http_status = description.get('error', {}).get('http_status') grpc_code = description.get('error', {}).get('grpc_code') diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index cabd82db..cef5ffa9 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -68,7 +68,7 @@ def insert(self, request: InsertRequest): validate_insert_request(self.__vault_client.get_logger(), request) log_info(SkyflowMessages.Info.INSERT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() - records_api = self.__vault_client.get_records_api() + records_api = self.__vault_client.get_records_api().with_raw_response insert_body = self.__build_insert_body(request) try: @@ -196,7 +196,7 @@ def detokenize(self, request: DetokenizeRequest): ) for item in request.data ] - tokens_api = self.__vault_client.get_tokens_api() + tokens_api = self.__vault_client.get_tokens_api().with_raw_response try: log_info(SkyflowMessages.Info.DETOKENIZE_TRIGGERED.value, 
self.__vault_client.get_logger()) api_response = tokens_api.record_service_detokenize( diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index e70afc0e..a1254932 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -183,21 +183,22 @@ def test_construct_invoke_connection_request_with_form_date_content_type(self): def test_parse_insert_response(self): api_response = Mock() - api_response.responses = [ + api_response.headers = {"x-request-id": "12345", "content-type": "application/json"} + api_response.data = Mock(responses=[ {"Status": 200, "Body": {"records": [{"skyflow_id": "id1"}]}}, {"Status": 400, "Body": {"error": TEST_ERROR_MESSAGE}} - ] + ]) result = parse_insert_response(api_response, continue_on_error=True) self.assertEqual(len(result.inserted_fields), 1) self.assertEqual(len(result.errors), 1) def test_parse_insert_response_continue_on_error_false(self): mock_api_response = Mock() - mock_api_response.records = [ + mock_api_response.headers = {"x-request-id": "12345", "content-type": "application/json"} + mock_api_response.data = Mock(records=[ Mock(skyflow_id="id_1", tokens={"token1": "token_value1"}), Mock(skyflow_id="id_2", tokens={"token2": "token_value2"}) - ] - + ]) result = parse_insert_response(mock_api_response, continue_on_error=False) self.assertIsInstance(result, InsertResponse) @@ -252,11 +253,12 @@ def test_parse_get_response_successful(self): def test_parse_detokenize_response_with_mixed_records(self): mock_api_response = Mock() - mock_api_response.records = [ - Mock(token="token1", value="value1", value_type=Mock(value="Type1"), error=None), + mock_api_response.headers = {"x-request-id": "12345", "content-type": "application/json"} + mock_api_response.data = Mock(records=[ + Mock(token="token1", value="value1", value_type="Type1", error=None), Mock(token="token2", value=None, value_type=None, error="Some error"), - Mock(token="token3", value="value3", value_type=Mock(value="Type2"), error=None), - ] + Mock(token="token3", value="value3", value_type="Type2", error=None), + ]) result = parse_detokenize_response(mock_api_response) self.assertIsInstance(result, DetokenizeResponse) @@ -267,7 +269,7 @@ def test_parse_detokenize_response_with_mixed_records(self): ] expected_errors = [ - {"token": "token2", "error": "Some error"} + {"token": "token2", "error": "Some error", "request_id": "12345"} ] self.assertEqual(result.detokenized_fields, expected_detokenized_fields) diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 89046e65..ea59189e 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -48,10 +48,12 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) # Mock API response to contain a mix of successful and failed insertions mock_api_response = Mock() - mock_api_response.responses = [ - {"Status": 200, "Body": {"records": [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]}}, - {"Status": 400, "Body": {"error": "Insert error for record 2"}} - ] + mock_api_response.data = { + "responses":[ + {"Status": 200, "Body": {"records": [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]}}, + {"Status": 400, "Body": {"error": "Insert error for record 2"}} + ] + } # Expected parsed response expected_inserted_fields = [ @@ -65,14 +67,14 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) # Set the return value for the parse response mock_parse_response.return_value 
= expected_response records_api = self.vault_client.get_records_api.return_value - records_api.record_service_batch_operation.return_value = mock_api_response + records_api.with_raw_response.record_service_batch_operation.return_value = mock_api_response # Call the insert function result = self.vault.insert(request) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_batch_operation.assert_called_once_with( + records_api.with_raw_response.record_service_batch_operation.assert_called_once_with( VAULT_ID, records=expected_body, continue_on_error=True, @@ -107,8 +109,8 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Mock API response for a successful insert mock_api_response = Mock() - mock_api_response.records = [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}] - + mock_api_response.data = {"records":[{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]} + # Expected parsed response expected_inserted_fields = [{'skyflow_id': 'id1', 'token_field': 'token_val1'}] expected_response = InsertResponse(inserted_fields=expected_inserted_fields) @@ -116,14 +118,14 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Set the return value for the parse response mock_parse_response.return_value = expected_response records_api = self.vault_client.get_records_api.return_value - records_api.record_service_insert_record.return_value = mock_api_response + records_api.with_raw_response.record_service_insert_record.return_value = mock_api_response # Call the insert function result = self.vault.insert(request) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_insert_record.assert_called_once_with( + records_api.with_raw_response.record_service_insert_record.assert_called_once_with( VAULT_ID, TABLE_NAME, records=expected_body, @@ -149,7 +151,7 @@ def test_insert_handles_generic_error(self, mock_validate): with self.assertRaises(Exception): self.vault.insert(request) - records_api.record_service_insert_record.assert_called_once() + records_api.with_raw_response.record_service_insert_record.assert_called_once() @patch("skyflow.vault.controller._vault.validate_insert_request") @patch("skyflow.vault.controller._vault.parse_insert_response") @@ -174,8 +176,8 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Mock API response for a successful insert mock_api_response = Mock() - mock_api_response.records = [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}] - + mock_api_response.data = {"records":[{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]} + # Expected parsed response expected_inserted_fields = [{'skyflow_id': 'id1', 'token_field': 'token_val1'}] expected_response = InsertResponse(inserted_fields=expected_inserted_fields) @@ -183,14 +185,14 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Set the return value for the parse response mock_parse_response.return_value = expected_response records_api = self.vault_client.get_records_api.return_value - records_api.record_service_insert_record.return_value = mock_api_response + records_api.with_raw_response.record_service_insert_record.return_value = mock_api_response # Call the insert function result = self.vault.insert(request) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - 
records_api.record_service_insert_record.assert_called_once_with( + records_api.with_raw_response.record_service_insert_record.assert_called_once_with( VAULT_ID, TABLE_NAME, records=expected_body, @@ -528,10 +530,12 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): # Mock API response mock_api_response = Mock() - mock_api_response.records = [ - Mock(token="token1", value="value1", value_type=Mock(value="STRING"), error=None), - Mock(token="token2", value="value2", value_type=Mock(value="STRING"), error=None) - ] + mock_api_response.data = { + "records":[ + Mock(token="token1", value="value1", value_type=Mock(value="STRING"), error=None), + Mock(token="token2", value="value2", value_type=Mock(value="STRING"), error=None) + ] + } # Expected parsed response expected_fields = [ @@ -543,14 +547,14 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): # Set the return value for parse_detokenize_response mock_parse_response.return_value = expected_response tokens_api = self.vault_client.get_tokens_api.return_value - tokens_api.record_service_detokenize.return_value = mock_api_response + tokens_api.with_raw_response.record_service_detokenize.return_value = mock_api_response # Call the detokenize function result = self.vault.detokenize(request) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - tokens_api.record_service_detokenize.assert_called_once_with( + tokens_api.with_raw_response.record_service_detokenize.assert_called_once_with( VAULT_ID, detokenization_parameters=expected_tokens_list, continue_on_error=False @@ -582,7 +586,7 @@ def test_detokenize_handles_generic_error(self, mock_validate): with self.assertRaises(Exception): self.vault.detokenize(request) - tokens_api.record_service_detokenize.assert_called_once() + tokens_api.with_raw_response.record_service_detokenize.assert_called_once() @patch("skyflow.vault.controller._vault.validate_tokenize_request") @patch("skyflow.vault.controller._vault.parse_tokenize_response") From f2ce4529143b344c4f28143ffa5b29589d4becf9 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Mon, 12 May 2025 11:57:57 +0530 Subject: [PATCH 23/60] SK-1909 Update fern generated code --- skyflow/generated/rest/audit/raw_client.py | 18 +- .../rest/authentication/raw_client.py | 38 ++-- .../generated/rest/bin_lookup/raw_client.py | 18 +- skyflow/generated/rest/core/client_wrapper.py | 2 +- .../rest/errors/bad_request_error.py | 8 +- .../generated/rest/errors/not_found_error.py | 8 +- .../rest/errors/unauthorized_error.py | 8 +- skyflow/generated/rest/query/raw_client.py | 18 +- skyflow/generated/rest/records/raw_client.py | 180 ++++++++++-------- skyflow/generated/rest/tokens/raw_client.py | 36 ++-- 10 files changed, 191 insertions(+), 143 deletions(-) diff --git a/skyflow/generated/rest/audit/raw_client.py b/skyflow/generated/rest/audit/raw_client.py index 9762e46d..b67b025e 100644 --- a/skyflow/generated/rest/audit/raw_client.py +++ b/skyflow/generated/rest/audit/raw_client.py @@ -242,18 +242,19 @@ def audit_service_list_audit_events( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise 
ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawAuditClient: @@ -468,15 +469,16 @@ async def audit_service_list_audit_events( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/authentication/raw_client.py b/skyflow/generated/rest/authentication/raw_client.py index 0c2778c2..bb1c2ed7 100644 --- a/skyflow/generated/rest/authentication/raw_client.py +++ b/skyflow/generated/rest/authentication/raw_client.py @@ -92,38 +92,41 @@ def authentication_service_get_auth_token( return HttpResponse(response=_response, data=_data) if _response.status_code == 400: raise BadRequestError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) if _response.status_code == 401: raise UnauthorizedError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawAuthenticationClient: @@ -201,35 +204,38 @@ async def authentication_service_get_auth_token( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 400: raise BadRequestError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) if 
_response.status_code == 401: raise UnauthorizedError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/bin_lookup/raw_client.py b/skyflow/generated/rest/bin_lookup/raw_client.py index c021d684..90202931 100644 --- a/skyflow/generated/rest/bin_lookup/raw_client.py +++ b/skyflow/generated/rest/bin_lookup/raw_client.py @@ -83,18 +83,19 @@ def bin_list_service_list_cards_of_bin( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawBinLookupClient: @@ -163,15 +164,16 @@ async def bin_list_service_list_cards_of_bin( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index 2c55b8d9..b1396aef 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -22,7 +22,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": 
"skyflow.generated.rest", - "X-Fern-SDK-Version": "0.0.163", + "X-Fern-SDK-Version": "0.0.166", } headers["Authorization"] = f"Bearer {self._get_token()}" return headers diff --git a/skyflow/generated/rest/errors/bad_request_error.py b/skyflow/generated/rest/errors/bad_request_error.py index 5f24fa6d..c5d0db48 100644 --- a/skyflow/generated/rest/errors/bad_request_error.py +++ b/skyflow/generated/rest/errors/bad_request_error.py @@ -6,5 +6,9 @@ class BadRequestError(ApiError): - def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): - super().__init__(status_code=400, body=body) + def __init__( + self, + body: typing.Dict[str, typing.Optional[typing.Any]], + headers: typing.Optional[typing.Dict[str, str]] = None, + ): + super().__init__(status_code=400, headers=headers, body=body) diff --git a/skyflow/generated/rest/errors/not_found_error.py b/skyflow/generated/rest/errors/not_found_error.py index 68977121..66307415 100644 --- a/skyflow/generated/rest/errors/not_found_error.py +++ b/skyflow/generated/rest/errors/not_found_error.py @@ -6,5 +6,9 @@ class NotFoundError(ApiError): - def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): - super().__init__(status_code=404, body=body) + def __init__( + self, + body: typing.Dict[str, typing.Optional[typing.Any]], + headers: typing.Optional[typing.Dict[str, str]] = None, + ): + super().__init__(status_code=404, headers=headers, body=body) diff --git a/skyflow/generated/rest/errors/unauthorized_error.py b/skyflow/generated/rest/errors/unauthorized_error.py index cd97f14d..3d58c2e6 100644 --- a/skyflow/generated/rest/errors/unauthorized_error.py +++ b/skyflow/generated/rest/errors/unauthorized_error.py @@ -6,5 +6,9 @@ class UnauthorizedError(ApiError): - def __init__(self, body: typing.Dict[str, typing.Optional[typing.Any]]): - super().__init__(status_code=401, body=body) + def __init__( + self, + body: typing.Dict[str, typing.Optional[typing.Any]], + headers: typing.Optional[typing.Dict[str, str]] = None, + ): + super().__init__(status_code=401, headers=headers, body=body) diff --git a/skyflow/generated/rest/query/raw_client.py b/skyflow/generated/rest/query/raw_client.py index b6a201a1..897d1e2d 100644 --- a/skyflow/generated/rest/query/raw_client.py +++ b/skyflow/generated/rest/query/raw_client.py @@ -70,18 +70,19 @@ def query_service_execute_query( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawQueryClient: @@ -138,15 +139,16 @@ async def query_service_execute_query( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, 
typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/records/raw_client.py b/skyflow/generated/rest/records/raw_client.py index 3bbed594..55b874c6 100644 --- a/skyflow/generated/rest/records/raw_client.py +++ b/skyflow/generated/rest/records/raw_client.py @@ -96,18 +96,19 @@ def record_service_batch_operation( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_bulk_get_record( self, @@ -204,18 +205,19 @@ def record_service_bulk_get_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_insert_record( self, @@ -292,18 +294,19 @@ def record_service_insert_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def 
record_service_bulk_delete_record( self, @@ -359,18 +362,19 @@ def record_service_bulk_delete_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_get_record( self, @@ -441,18 +445,19 @@ def record_service_get_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_update_record( self, @@ -522,18 +527,19 @@ def record_service_update_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_delete_record( self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -577,18 +583,19 @@ def record_service_delete_record( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), 
status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def file_service_upload_file( self, @@ -646,18 +653,19 @@ def file_service_upload_file( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def file_service_delete_file( self, @@ -710,18 +718,19 @@ def file_service_delete_file( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def file_service_get_file_scan_status( self, @@ -774,18 +783,19 @@ def file_service_get_file_scan_status( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawRecordsClient: @@ -853,18 +863,19 @@ async def record_service_batch_operation( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise 
ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_bulk_get_record( self, @@ -961,18 +972,19 @@ async def record_service_bulk_get_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_insert_record( self, @@ -1049,18 +1061,19 @@ async def record_service_insert_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_bulk_delete_record( self, @@ -1116,18 +1129,19 @@ async def record_service_bulk_delete_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_get_record( self, @@ -1198,18 +1212,19 @@ async def record_service_get_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( 
typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_update_record( self, @@ -1279,18 +1294,19 @@ async def record_service_update_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_delete_record( self, vault_id: str, object_name: str, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -1334,18 +1350,19 @@ async def record_service_delete_record( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def file_service_upload_file( self, @@ -1403,18 +1420,19 @@ async def file_service_upload_file( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response_json) async def file_service_delete_file( self, @@ -1467,18 +1485,19 @@ async def file_service_delete_file( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def file_service_get_file_scan_status( self, @@ -1531,15 +1550,16 @@ async def file_service_get_file_scan_status( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/tokens/raw_client.py b/skyflow/generated/rest/tokens/raw_client.py index 58dfa94d..057b9f68 100644 --- a/skyflow/generated/rest/tokens/raw_client.py +++ b/skyflow/generated/rest/tokens/raw_client.py @@ -88,18 +88,19 @@ def record_service_detokenize( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) def record_service_tokenize( self, @@ -155,18 +156,19 @@ def record_service_tokenize( return HttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise 
ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) class AsyncRawTokensClient: @@ -237,18 +239,19 @@ async def record_service_detokenize( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) async def record_service_tokenize( self, @@ -304,15 +307,16 @@ async def record_service_tokenize( return AsyncHttpResponse(response=_response, data=_data) if _response.status_code == 404: raise NotFoundError( - typing.cast( + headers=dict(_response.headers), + body=typing.cast( typing.Dict[str, typing.Optional[typing.Any]], parse_obj_as( type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore object_=_response.json(), ), - ) + ), ) _response_json = _response.json() except JSONDecodeError: - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text) - raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) From a9a44d837b7d732c034838f81597d932290f3ad5 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Mon, 12 May 2025 14:28:59 +0530 Subject: [PATCH 24/60] SK-1909 Handle invalid cluster ID error scenario --- skyflow/utils/_skyflow_messages.py | 2 ++ skyflow/utils/_utils.py | 11 +++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index e67c3f7f..4bc95354 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -16,6 +16,8 @@ class ErrorCodes(Enum): REDACTION_WITH_TOKENS_NOT_SUPPORTED = 400 class Error(Enum): + GENERIC_API_ERROR = f"{error_prefix} Validation error. Invalid configuration. Please add a valid vault configuration." + EMPTY_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id." INVALID_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id as a string." EMPTY_CLUSTER_ID = f"{error_prefix} Initialization failed. Invalid cluster Id for vault with id {{}}. Specify a valid cluster Id." 
diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 13556af1..514ebf6d 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -3,6 +3,7 @@ import urllib.parse from dotenv import load_dotenv import dotenv +import httpx from requests.sessions import PreparedRequest from requests.models import HTTPError import requests @@ -369,6 +370,10 @@ def log_and_reject_error(description, status_code, request_id, http_status=None, raise SkyflowError(description, status_code, request_id, grpc_code, http_status, details) def handle_exception(error, logger): + # handle invalid cluster ID error scenario + if (isinstance(error, httpx.ConnectError)): + handle_generic_error(error, None, SkyflowMessages.ErrorCodes.INVALID_INPUT.value, logger) + request_id = error.headers.get('x-request-id', 'unknown-request-id') content_type = error.headers.get('content-type') data = error.body @@ -403,9 +408,11 @@ def handle_text_error(err, data, request_id, logger): log_and_reject_error(data, err.status, request_id, logger = logger) def handle_generic_error(err, request_id, logger): - description = "An error occurred." - log_and_reject_error(description, err.status, request_id, logger = logger) + handle_generic_error(err, request_id, err.status, logger = logger) +def handle_generic_error(err, request_id, status, logger): + description = SkyflowMessages.Error.GENERIC_API_ERROR.value + log_and_reject_error(description, status, request_id, logger = logger) def encode_column_values(get_request): encoded_column_values = list() From da88042a25aef2b18d56e2725c4003e6331681a8 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 10:36:54 +0530 Subject: [PATCH 25/60] SK-1909 Fix inconsistencies and issues in Python SDK v2 --- skyflow/error/_skyflow_error.py | 2 +- skyflow/utils/_utils.py | 5 +++-- skyflow/vault/controller/_vault.py | 2 -- tests/vault/controller/test__connection.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/skyflow/error/_skyflow_error.py b/skyflow/error/_skyflow_error.py index e23c0133..7b917fae 100644 --- a/skyflow/error/_skyflow_error.py +++ b/skyflow/error/_skyflow_error.py @@ -8,7 +8,7 @@ def __init__(self, request_id = None, grpc_code = None, http_status = None, - details = None): + details = []): self.message = message self.http_code = http_code self.grpc_code = grpc_code diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 514ebf6d..f2822d06 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -359,7 +359,8 @@ def parse_invoke_connection_response(api_response: requests.Response): if error_from_client is not None: if details is None: details = [] - details.append({'error_from_client': error_from_client}) + error_from_client_bool = error_from_client.lower() == 'true' + details.append({'error_from_client': error_from_client_bool}) raise SkyflowError(message, status_code, request_id, grpc_code, http_status, details) except json.JSONDecodeError: @@ -397,7 +398,7 @@ def handle_json_error(err, data, request_id, logger): status_code = description.get('error', {}).get('http_code', 500) # Default to 500 if not found http_status = description.get('error', {}).get('http_status') grpc_code = description.get('error', {}).get('grpc_code') - details = description.get('error', {}).get('details') + details = description.get('error', {}).get('details', []) description_message = description.get('error', {}).get('message', "An unknown error occurred.") log_and_reject_error(description_message, status_code, request_id, http_status, 
grpc_code, details, logger = logger) diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index cef5ffa9..121890e9 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -43,8 +43,6 @@ def __build_batch_field_records(self, values, tokens, table_name, return_tokens, upsert=upsert, tokens=token ) - if token is not None: - batch_record.tokens = token batch_record_list.append(batch_record) return batch_record_list diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index 61be3163..adc73078 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -120,6 +120,6 @@ def test_parse_invoke_connection_response_error_from_client(self): exception = context.exception - self.assertTrue(any(detail.get('error_from_client') == 'true' for detail in exception.details)) + self.assertTrue(any(detail.get('error_from_client') == True for detail in exception.details)) self.assertEqual(exception.request_id, '12345') From d79488acaca7cf08bf196c580707289829daf059 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 13:05:51 +0530 Subject: [PATCH 26/60] SK-1909 Fix issues in Python SDK v2 - Fix inconsistent response structure for invoke connection - Fix 404 Not found error for get by column values - Fix failing unit tests due to code changes --- skyflow/utils/_utils.py | 11 ++++------- .../vault/connection/_invoke_connection_response.py | 7 ++++--- skyflow/vault/controller/_vault.py | 2 -- tests/utils/test__utils.py | 4 ++-- tests/vault/controller/test__connection.py | 6 +++++- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index f2822d06..4dbd32a3 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -326,8 +326,6 @@ def parse_query_response(api_response: V1GetQueryResponse): return query_response def parse_invoke_connection_response(api_response: requests.Response): - invoke_connection_response = InvokeConnectionResponse() - status_code = api_response.status_code content = api_response.content if isinstance(content, bytes): @@ -335,13 +333,12 @@ def parse_invoke_connection_response(api_response: requests.Response): try: api_response.raise_for_status() try: - json_content = json.loads(content) + data = json.loads(content) + metadata = {} if 'x-request-id' in api_response.headers: - request_id = api_response.headers['x-request-id'] - json_content['request_id'] = request_id + metadata['request_id'] = api_response.headers['x-request-id'] - invoke_connection_response.response = json_content - return invoke_connection_response + return InvokeConnectionResponse(data=data, metadata=metadata) except Exception as e: raise SkyflowError(SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content), status_code) except HTTPError: diff --git a/skyflow/vault/connection/_invoke_connection_response.py b/skyflow/vault/connection/_invoke_connection_response.py index 661b61d3..818b94a1 100644 --- a/skyflow/vault/connection/_invoke_connection_response.py +++ b/skyflow/vault/connection/_invoke_connection_response.py @@ -1,9 +1,10 @@ class InvokeConnectionResponse: - def __init__(self, response = None): - self.response = response + def __init__(self, data=None, metadata=None): + self.data = data + self.metadata = metadata if metadata else {} def __repr__(self): - return f"ConnectionResponse({self.response})" + return 
f"ConnectionResponse('data'={self.data},'metadata'={self.metadata})" def __str__(self): return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index 121890e9..c05d81f0 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -136,8 +136,6 @@ def delete(self, request: DeleteRequest): def get(self, request: GetRequest): log_info(SkyflowMessages.Info.VALIDATE_GET_REQUEST.value, self.__vault_client.get_logger()) validate_get_request(self.__vault_client.get_logger(), request) - if request.column_values: - request.column_values = encode_column_values(request) log_info(SkyflowMessages.Info.GET_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() records_api = self.__vault_client.get_records_api() diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index a1254932..a7306e7b 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -321,8 +321,8 @@ def test_parse_invoke_connection_response_successful(self, mock_response): result = parse_invoke_connection_response(mock_response) self.assertIsInstance(result, InvokeConnectionResponse) - self.assertEqual(result.response["key"], "value") - self.assertEqual(result.response["request_id"], "1234") + self.assertEqual(result.data["key"], "value") + self.assertEqual(result.metadata["request_id"], "1234") @patch("requests.Response") def test_parse_invoke_connection_response_json_decode_error(self, mock_response): diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index adc73078..70702514 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -53,7 +53,11 @@ def test_invoke_success(self, mock_send): response = self.connection.invoke(request) # Assertions for successful invocation - self.assertEqual(response.response, {"response": "success", "request_id": "test-request-id"}) + expected_response = { + 'data': {"response": "success"}, + 'metadata': {"request_id": "test-request-id"} + } + self.assertEqual(vars(response), expected_response) self.mock_vault_client.get_bearer_token.assert_called_once() @patch('requests.Session.send') From 385617f5bb5742d613d88d6256a3e21942d60ecb Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 12:22:03 +0000 Subject: [PATCH 27/60] [AUTOMATED] Private Release 2.0.0b2.dev0+3a80017 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index fc1f9e6a..698c67fc 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b2.dev0+f760bc0' +current_version = '2.0.0b2.dev0+3a80017' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index d73d9196..a2802c11 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b2.dev0+f760bc0' \ No newline at end of file +SDK_VERSION = '2.0.0b2.dev0+3a80017' \ No newline at end of file From b01869295c464042ff582f54e5cc90ca243f44be Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 18:56:28 +0530 Subject: [PATCH 28/60] SK-1909 Update sample to trigger release --- samples/vault_api/client_operations.py | 1 - 1 file changed, 1 deletion(-) diff --git a/samples/vault_api/client_operations.py b/samples/vault_api/client_operations.py index 
80a8ca3a..1f9f12ea 100644 --- a/samples/vault_api/client_operations.py +++ b/samples/vault_api/client_operations.py @@ -12,7 +12,6 @@ 3. Create a delete request 4. Handle response and errors """ - def perform_secure_data_deletion(): try: # Step 1: Configure Bearer Token Credentials From 3d817ab196c75e5c4fa32aca963b83ec02c5f94d Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 18:59:11 +0530 Subject: [PATCH 29/60] SK-1909 Trigger internal release --- skyflow/vault/controller/_vault.py | 1 + 1 file changed, 1 insertion(+) diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index c05d81f0..a3b07528 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -139,6 +139,7 @@ def get(self, request: GetRequest): log_info(SkyflowMessages.Info.GET_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() records_api = self.__vault_client.get_records_api() + try: log_info(SkyflowMessages.Info.GET_TRIGGERED.value, self.__vault_client.get_logger()) api_response = records_api.record_service_bulk_get_record( From 7385eb084d92a281f9644c956ae1f57ee856dd6f Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 13:29:34 +0000 Subject: [PATCH 30/60] [AUTOMATED] Private Release 2.0.0b2.dev0+3d817ab --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 698c67fc..9fc1f06e 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b2.dev0+3a80017' +current_version = '2.0.0b2.dev0+3d817ab' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index a2802c11..f7eaf46f 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b2.dev0+3a80017' \ No newline at end of file +SDK_VERSION = '2.0.0b2.dev0+3d817ab' \ No newline at end of file From 1816a92a62fd5fe5c58025bfd793c9603243181d Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Wed, 14 May 2025 20:14:34 +0530 Subject: [PATCH 31/60] SK-1911: add sky metadata headers --- skyflow/vault/controller/_vault.py | 30 ++++++++++---- tests/vault/controller/test__vault.py | 56 --------------------------- 2 files changed, 22 insertions(+), 64 deletions(-) diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index a3b07528..4602cf87 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -1,8 +1,10 @@ +import json from skyflow.generated.rest import V1FieldRecords, V1BatchRecord, V1TokenizeRecordRequest, \ V1DetokenizeRecordRequest from skyflow.utils import SkyflowMessages, parse_insert_response, \ handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, \ - parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values + parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values, get_metrics +from skyflow.utils.constants import SKY_META_DATA_HEADER from skyflow.utils.enums import RequestMethod from skyflow.utils.logger import log_info, log_error_log from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ @@ -61,6 +63,12 @@ def __build_insert_body(self, request: InsertRequest): records_list = self.__build_bulk_field_records(request.values, request.tokens) return records_list + 
def __get_headers(self): + headers = { + SKY_META_DATA_HEADER: json.dumps(get_metrics()) + } + return headers + def insert(self, request: InsertRequest): log_info(SkyflowMessages.Info.VALIDATE_INSERT_REQUEST.value, self.__vault_client.get_logger()) validate_insert_request(self.__vault_client.get_logger(), request) @@ -73,11 +81,11 @@ def insert(self, request: InsertRequest): log_info(SkyflowMessages.Info.INSERT_TRIGGERED.value, self.__vault_client.get_logger()) if request.continue_on_error: api_response = records_api.record_service_batch_operation(self.__vault_client.get_vault_id(), - records=insert_body, continue_on_error=request.continue_on_error, byot=request.token_mode.value) + records=insert_body, continue_on_error=request.continue_on_error, byot=request.token_mode.value, request_options=self.__get_headers()) else: api_response = records_api.record_service_insert_record(self.__vault_client.get_vault_id(), - request.table_name, records=insert_body,tokenization= request.return_tokens, upsert=request.upsert, homogeneous=request.homogeneous, byot=request.token_mode.value) + request.table_name, records=insert_body,tokenization= request.return_tokens, upsert=request.upsert, homogeneous=request.homogeneous, byot=request.token_mode.value, request_options=self.__get_headers()) insert_response = parse_insert_response(api_response, request.continue_on_error) log_info(SkyflowMessages.Info.INSERT_SUCCESS.value, self.__vault_client.get_logger()) @@ -104,7 +112,8 @@ def update(self, request: UpdateRequest): id=request.data.get("skyflow_id"), record=record, tokenization=request.return_tokens, - byot=request.token_mode.value + byot=request.token_mode.value, + request_options = self.__get_headers() ) log_info(SkyflowMessages.Info.UPDATE_SUCCESS.value, self.__vault_client.get_logger()) update_response = parse_update_record_response(api_response) @@ -124,7 +133,8 @@ def delete(self, request: DeleteRequest): api_response = records_api.record_service_bulk_delete_record( self.__vault_client.get_vault_id(), request.table, - skyflow_ids=request.ids + skyflow_ids=request.ids, + request_options=self.__get_headers() ) log_info(SkyflowMessages.Info.DELETE_SUCCESS.value, self.__vault_client.get_logger()) delete_response = parse_delete_response(api_response) @@ -154,6 +164,7 @@ def get(self, request: GetRequest): download_url=request.download_url, column_name=request.column_name, column_values=request.column_values, + request_options=self.__get_headers() ) log_info(SkyflowMessages.Info.GET_SUCCESS.value, self.__vault_client.get_logger()) get_response = parse_get_response(api_response) @@ -172,7 +183,8 @@ def query(self, request: QueryRequest): log_info(SkyflowMessages.Info.QUERY_TRIGGERED.value, self.__vault_client.get_logger()) api_response = query_api.query_service_execute_query( self.__vault_client.get_vault_id(), - query=request.query + query=request.query, + request_options=self.__get_headers() ) log_info(SkyflowMessages.Info.QUERY_SUCCESS.value, self.__vault_client.get_logger()) query_response = parse_query_response(api_response) @@ -199,7 +211,8 @@ def detokenize(self, request: DetokenizeRequest): api_response = tokens_api.record_service_detokenize( self.__vault_client.get_vault_id(), detokenization_parameters=tokens_list, - continue_on_error = request.continue_on_error + continue_on_error = request.continue_on_error, + request_options=self.__get_headers() ) log_info(SkyflowMessages.Info.DETOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) detokenize_response = parse_detokenize_response(api_response) 
@@ -223,7 +236,8 @@ def tokenize(self, request: TokenizeRequest): log_info(SkyflowMessages.Info.TOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) api_response = tokens_api.record_service_tokenize( self.__vault_client.get_vault_id(), - tokenization_parameters=records_list + tokenization_parameters=records_list, + request_options=self.__get_headers() ) tokenize_response = parse_tokenize_response(api_response) log_info(SkyflowMessages.Info.TOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index ea59189e..39b44ae1 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -74,12 +74,6 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.with_raw_response.record_service_batch_operation.assert_called_once_with( - VAULT_ID, - records=expected_body, - continue_on_error=True, - byot="DISABLE" - ) mock_parse_response.assert_called_once_with(mock_api_response, True) # Assert that the result matches the expected InsertResponse @@ -125,15 +119,6 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.with_raw_response.record_service_insert_record.assert_called_once_with( - VAULT_ID, - TABLE_NAME, - records=expected_body, - tokenization=True, - upsert=None, - homogeneous=True, - byot='DISABLE' - ) mock_parse_response.assert_called_once_with(mock_api_response, False) # Assert that the result matches the expected InsertResponse @@ -192,15 +177,6 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.with_raw_response.record_service_insert_record.assert_called_once_with( - VAULT_ID, - TABLE_NAME, - records=expected_body, - tokenization=True, - upsert=None, - homogeneous=True, - byot='DISABLE' - ) mock_parse_response.assert_called_once_with(mock_api_response, False) # Assert that the result matches the expected InsertResponse @@ -243,14 +219,6 @@ def test_update_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_update_record.assert_called_once_with( - VAULT_ID, - TABLE_NAME, - id="12345", - record=expected_record, - tokenization=True, - byot="DISABLE" - ) mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected UpdateResponse @@ -301,11 +269,6 @@ def test_delete_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_bulk_delete_record.assert_called_once_with( - VAULT_ID, - TABLE_NAME, - skyflow_ids=["12345", "67890"] - ) mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected DeleteResponse @@ -379,10 +342,6 @@ def test_get_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - records_api.record_service_bulk_get_record.assert_called_once_with( - VAULT_ID, - **expected_payload - ) 
mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected GetResponse @@ -435,7 +394,6 @@ def test_get_successful_with_column_values(self, mock_parse_response, mock_valid # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) records_api.record_service_bulk_get_record.assert_called_once() - mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected GetResponse self.assertEqual(result.data, expected_data) @@ -485,11 +443,6 @@ def test_query_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - query_api.query_service_execute_query.assert_called_once_with( - VAULT_ID, - query="SELECT * FROM test_table" - ) - mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected QueryResponse self.assertEqual(result.fields, expected_fields) @@ -554,11 +507,6 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - tokens_api.with_raw_response.record_service_detokenize.assert_called_once_with( - VAULT_ID, - detokenization_parameters=expected_tokens_list, - continue_on_error=False - ) mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected DetokenizeResponse @@ -630,10 +578,6 @@ def test_tokenize_successful(self, mock_parse_response, mock_validate): # Assertions mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) - tokens_api.record_service_tokenize.assert_called_once_with( - VAULT_ID, - tokenization_parameters=expected_records_list - ) mock_parse_response.assert_called_once_with(mock_api_response) # Check that the result matches the expected TokenizeResponse From f38a37c14656391308202f7c5a619cd2fda00a82 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Wed, 14 May 2025 14:45:17 +0000 Subject: [PATCH 32/60] [AUTOMATED] Private Release 2.0.0b2.dev0+1816a92 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 9fc1f06e..693812a0 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b2.dev0+3d817ab' +current_version = '2.0.0b2.dev0+1816a92' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index f7eaf46f..52222bf5 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b2.dev0+3d817ab' \ No newline at end of file +SDK_VERSION = '2.0.0b2.dev0+1816a92' \ No newline at end of file From 625489617c2b143952114265807146507ac589ac Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Wed, 14 May 2025 20:19:30 +0530 Subject: [PATCH 33/60] SK-1911: add sky metadata header constant --- skyflow/utils/constants.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/skyflow/utils/constants.py b/skyflow/utils/constants.py index fea57008..d7e7a7f9 100644 --- a/skyflow/utils/constants.py +++ b/skyflow/utils/constants.py @@ -1,2 +1,3 @@ OPTIONAL_TOKEN='token' -PROTOCOL='https' \ No newline at end of file +PROTOCOL='https' +SKY_META_DATA_HEADER='sky-metadata' \ No newline at end of file From 97c924688f54aa1e2027935aac05dd76d710555a Mon Sep 17 00:00:00 2001 From: 
saileshwar-skyflow Date: Wed, 14 May 2025 14:50:20 +0000 Subject: [PATCH 34/60] [AUTOMATED] Private Release 2.0.0b2.dev0+6254896 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 693812a0..d11a3346 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b2.dev0+1816a92' +current_version = '2.0.0b2.dev0+6254896' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 52222bf5..2d4a9bc9 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b2.dev0+1816a92' \ No newline at end of file +SDK_VERSION = '2.0.0b2.dev0+6254896' \ No newline at end of file From 3af194c2a2d52267732656f3c095297e4195af1f Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Wed, 14 May 2025 14:58:41 +0000 Subject: [PATCH 35/60] [AUTOMATED] Public Release - 2.0.0b3 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index d11a3346..09cea753 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b2.dev0+6254896' +current_version = '2.0.0b3' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 2d4a9bc9..22cfc2df 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b2.dev0+6254896' \ No newline at end of file +SDK_VERSION = '2.0.0b3' \ No newline at end of file From e63345ea5acd61a5d4c12d92499b65e4a6587e4b Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Tue, 17 Jun 2025 12:52:14 +0530 Subject: [PATCH 36/60] SK-2068: Detect Support in Python SDK. 
(#184) * SK-2048: Detect Support --- .github/workflows/shared-tests.yml | 2 +- README.md | 481 +++- samples/detect_api/deidentify_file.py | 105 + samples/detect_api/deidentify_text.py | 82 + samples/detect_api/get_detect_run.py | 61 + samples/detect_api/reidentify_text.py | 66 + setup.py | 2 +- skyflow/client/skyflow.py | 11 +- skyflow/generated/rest/__init__.py | 152 +- skyflow/generated/rest/audit/client.py | 25 +- skyflow/generated/rest/audit/raw_client.py | 8 +- .../generated/rest/authentication/client.py | 27 +- .../rest/authentication/raw_client.py | 24 +- skyflow/generated/rest/bin_lookup/client.py | 25 +- .../generated/rest/bin_lookup/raw_client.py | 8 +- skyflow/generated/rest/client.py | 23 +- skyflow/generated/rest/core/client_wrapper.py | 2 +- .../generated/rest/core/force_multipart.py | 16 + skyflow/generated/rest/core/http_client.py | 94 +- .../generated/rest/core/pydantic_utilities.py | 2 +- skyflow/generated/rest/deprecated/__init__.py | 4 + skyflow/generated/rest/deprecated/client.py | 598 +++++ .../generated/rest/deprecated/raw_client.py | 624 +++++ skyflow/generated/rest/errors/__init__.py | 3 +- .../rest/errors/bad_request_error.py | 6 +- .../rest/errors/internal_server_error.py | 11 + .../generated/rest/errors/not_found_error.py | 6 +- .../rest/errors/unauthorized_error.py | 6 +- skyflow/generated/rest/files/__init__.py | 45 + skyflow/generated/rest/files/client.py | 1547 +++++++++++ skyflow/generated/rest/files/raw_client.py | 2355 +++++++++++++++++ .../generated/rest/files/types/__init__.py | 43 + .../types/deidentify_audio_request_file.py | 34 + ...identify_audio_request_file_data_format.py | 5 + ...tify_audio_request_output_transcription.py | 14 + .../types/deidentify_document_request_file.py | 34 + ...ntify_document_request_file_data_format.py | 5 + .../types/deidentify_file_request_file.py | 34 + ...eidentify_file_request_file_data_format.py | 28 + .../types/deidentify_image_request_file.py | 34 + ...identify_image_request_file_data_format.py | 7 + ...deidentify_image_request_masking_method.py | 5 + .../types/deidentify_pdf_request_file.py | 33 + .../deidentify_presentation_request_file.py | 34 + ...y_presentation_request_file_data_format.py | 5 + .../deidentify_spreadsheet_request_file.py | 34 + ...fy_spreadsheet_request_file_data_format.py | 5 + ...deidentify_structured_text_request_file.py | 34 + ...tructured_text_request_file_data_format.py | 5 + .../types/deidentify_text_request_file.py | 33 + skyflow/generated/rest/query/client.py | 27 +- skyflow/generated/rest/query/raw_client.py | 8 +- skyflow/generated/rest/records/client.py | 468 +++- skyflow/generated/rest/records/raw_client.py | 110 +- skyflow/generated/rest/strings/__init__.py | 7 + skyflow/generated/rest/strings/client.py | 289 ++ skyflow/generated/rest/strings/raw_client.py | 445 ++++ .../generated/rest/strings/types/__init__.py | 7 + .../types/reidentify_string_request_format.py | 37 + skyflow/generated/rest/tokens/client.py | 76 +- skyflow/generated/rest/tokens/raw_client.py | 16 +- skyflow/generated/rest/types/__init__.py | 104 + .../types/advanced_options_column_mapping.py | 37 + .../advanced_options_entity_column_map.py | 28 + .../types/advanced_options_vault_schema.py | 29 + skyflow/generated/rest/types/allow_regex.py | 5 + .../types/audio_config_transcription_type.py | 19 + .../rest/types/deidentify_file_output.py | 45 + ...dentify_file_output_processed_file_type.py | 19 + .../rest/types/deidentify_file_response.py | 26 + .../rest/types/deidentify_status_response.py | 76 + 
.../deidentify_status_response_output_type.py | 5 + .../deidentify_status_response_status.py | 5 + ...fy_status_response_word_character_count.py | 26 + .../rest/types/deidentify_string_response.py | 42 + .../rest/types/detect_data_accuracy.py | 17 + .../rest/types/detect_data_entities.py | 72 + .../types/detect_file_request_data_type.py | 5 + .../types/detect_request_deidentify_option.py | 5 + .../generated/rest/types/detected_entity.py | 43 + .../generated/rest/types/entity_location.py | 41 + skyflow/generated/rest/types/entity_type.py | 72 + skyflow/generated/rest/types/entity_types.py | 7 + .../generated/rest/types/error_response.py | 20 + .../rest/types/error_response_error.py | 35 + skyflow/generated/rest/types/error_string.py | 3 + ...ocessed_file_output_processed_file_type.py | 19 + .../rest/types/reidentify_string_response.py | 26 + skyflow/generated/rest/types/resource_id.py | 3 + .../generated/rest/types/restrict_regex.py | 5 + skyflow/generated/rest/types/token_type.py | 39 + .../rest/types/token_type_default.py | 5 + .../rest/types/token_type_without_vault.py | 34 + .../types/token_type_without_vault_default.py | 5 + .../generated/rest/types/transformations.py | 27 + .../rest/types/transformations_shift_dates.py | 37 + ...ormations_shift_dates_entity_types_item.py | 5 + skyflow/generated/rest/types/uuid_.py | 3 + .../rest/types/v_1_advanced_options.py | 38 + .../generated/rest/types/v_1_audio_config.py | 31 + .../generated/rest/types/v_1_audio_options.py | 46 + .../rest/types/v_1_detect_file_response.py | 26 + .../rest/types/v_1_detect_status_response.py | 34 + .../v_1_detect_status_response_status.py | 5 + .../rest/types/v_1_detect_text_request.py | 68 + .../rest/types/v_1_detect_text_response.py | 32 + .../rest/types/v_1_file_data_format.py | 28 + .../generated/rest/types/v_1_image_options.py | 31 + skyflow/generated/rest/types/v_1_locations.py | 41 + .../generated/rest/types/v_1_pdf_config.py | 24 + .../generated/rest/types/v_1_pdf_options.py | 31 + .../rest/types/v_1_processed_file_output.py | 31 + .../rest/types/v_1_response_entities.py | 43 + skyflow/generated/rest/types/vault_id.py | 3 + skyflow/generated/rest/version.py | 2 +- skyflow/utils/__init__.py | 2 +- skyflow/utils/_skyflow_messages.py | 97 +- skyflow/utils/_utils.py | 35 +- skyflow/utils/_version.py | 2 +- skyflow/utils/enums/__init__.py | 8 +- skyflow/utils/enums/detect_entities.py | 66 + .../enums/detect_output_transcriptions.py | 8 + skyflow/utils/enums/masking_method.py | 5 + skyflow/utils/enums/token_type.py | 6 + skyflow/utils/validations/__init__.py | 4 + skyflow/utils/validations/_validations.py | 152 +- skyflow/vault/client/client.py | 6 + skyflow/vault/controller/__init__.py | 3 +- skyflow/vault/controller/_detect.py | 410 +++ skyflow/vault/detect/__init__.py | 13 + skyflow/vault/detect/_audio_bleep.py | 14 + skyflow/vault/detect/_date_transformation.py | 8 + .../vault/detect/_deidentify_file_request.py | 42 + .../vault/detect/_deidentify_file_response.py | 44 + .../vault/detect/_deidentify_text_request.py | 19 + .../vault/detect/_deidentify_text_response.py | 19 + skyflow/vault/detect/_entity_info.py | 20 + .../vault/detect/_get_detect_run_request.py | 6 + .../vault/detect/_reidentify_text_request.py | 12 + .../vault/detect/_reidentify_text_response.py | 9 + skyflow/vault/detect/_text_index.py | 10 + skyflow/vault/detect/_token_format.py | 13 + skyflow/vault/detect/_transformations.py | 5 + tests/utils/test__utils.py | 176 +- tests/vault/controller/test__detect.py | 579 ++++ 145 files changed, 11366 
insertions(+), 272 deletions(-) create mode 100644 samples/detect_api/deidentify_file.py create mode 100644 samples/detect_api/deidentify_text.py create mode 100644 samples/detect_api/get_detect_run.py create mode 100644 samples/detect_api/reidentify_text.py create mode 100644 skyflow/generated/rest/core/force_multipart.py create mode 100644 skyflow/generated/rest/deprecated/__init__.py create mode 100644 skyflow/generated/rest/deprecated/client.py create mode 100644 skyflow/generated/rest/deprecated/raw_client.py create mode 100644 skyflow/generated/rest/errors/internal_server_error.py create mode 100644 skyflow/generated/rest/files/__init__.py create mode 100644 skyflow/generated/rest/files/client.py create mode 100644 skyflow/generated/rest/files/raw_client.py create mode 100644 skyflow/generated/rest/files/types/__init__.py create mode 100644 skyflow/generated/rest/files/types/deidentify_audio_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py create mode 100644 skyflow/generated/rest/files/types/deidentify_document_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_image_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py create mode 100644 skyflow/generated/rest/files/types/deidentify_pdf_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_presentation_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py create mode 100644 skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_text_request_file.py create mode 100644 skyflow/generated/rest/strings/__init__.py create mode 100644 skyflow/generated/rest/strings/client.py create mode 100644 skyflow/generated/rest/strings/raw_client.py create mode 100644 skyflow/generated/rest/strings/types/__init__.py create mode 100644 skyflow/generated/rest/strings/types/reidentify_string_request_format.py create mode 100644 skyflow/generated/rest/types/advanced_options_column_mapping.py create mode 100644 skyflow/generated/rest/types/advanced_options_entity_column_map.py create mode 100644 skyflow/generated/rest/types/advanced_options_vault_schema.py create mode 100644 skyflow/generated/rest/types/allow_regex.py create mode 100644 skyflow/generated/rest/types/audio_config_transcription_type.py create mode 100644 skyflow/generated/rest/types/deidentify_file_output.py create mode 100644 skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py create mode 100644 skyflow/generated/rest/types/deidentify_file_response.py 
create mode 100644 skyflow/generated/rest/types/deidentify_status_response.py create mode 100644 skyflow/generated/rest/types/deidentify_status_response_output_type.py create mode 100644 skyflow/generated/rest/types/deidentify_status_response_status.py create mode 100644 skyflow/generated/rest/types/deidentify_status_response_word_character_count.py create mode 100644 skyflow/generated/rest/types/deidentify_string_response.py create mode 100644 skyflow/generated/rest/types/detect_data_accuracy.py create mode 100644 skyflow/generated/rest/types/detect_data_entities.py create mode 100644 skyflow/generated/rest/types/detect_file_request_data_type.py create mode 100644 skyflow/generated/rest/types/detect_request_deidentify_option.py create mode 100644 skyflow/generated/rest/types/detected_entity.py create mode 100644 skyflow/generated/rest/types/entity_location.py create mode 100644 skyflow/generated/rest/types/entity_type.py create mode 100644 skyflow/generated/rest/types/entity_types.py create mode 100644 skyflow/generated/rest/types/error_response.py create mode 100644 skyflow/generated/rest/types/error_response_error.py create mode 100644 skyflow/generated/rest/types/error_string.py create mode 100644 skyflow/generated/rest/types/processed_file_output_processed_file_type.py create mode 100644 skyflow/generated/rest/types/reidentify_string_response.py create mode 100644 skyflow/generated/rest/types/resource_id.py create mode 100644 skyflow/generated/rest/types/restrict_regex.py create mode 100644 skyflow/generated/rest/types/token_type.py create mode 100644 skyflow/generated/rest/types/token_type_default.py create mode 100644 skyflow/generated/rest/types/token_type_without_vault.py create mode 100644 skyflow/generated/rest/types/token_type_without_vault_default.py create mode 100644 skyflow/generated/rest/types/transformations.py create mode 100644 skyflow/generated/rest/types/transformations_shift_dates.py create mode 100644 skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py create mode 100644 skyflow/generated/rest/types/uuid_.py create mode 100644 skyflow/generated/rest/types/v_1_advanced_options.py create mode 100644 skyflow/generated/rest/types/v_1_audio_config.py create mode 100644 skyflow/generated/rest/types/v_1_audio_options.py create mode 100644 skyflow/generated/rest/types/v_1_detect_file_response.py create mode 100644 skyflow/generated/rest/types/v_1_detect_status_response.py create mode 100644 skyflow/generated/rest/types/v_1_detect_status_response_status.py create mode 100644 skyflow/generated/rest/types/v_1_detect_text_request.py create mode 100644 skyflow/generated/rest/types/v_1_detect_text_response.py create mode 100644 skyflow/generated/rest/types/v_1_file_data_format.py create mode 100644 skyflow/generated/rest/types/v_1_image_options.py create mode 100644 skyflow/generated/rest/types/v_1_locations.py create mode 100644 skyflow/generated/rest/types/v_1_pdf_config.py create mode 100644 skyflow/generated/rest/types/v_1_pdf_options.py create mode 100644 skyflow/generated/rest/types/v_1_processed_file_output.py create mode 100644 skyflow/generated/rest/types/v_1_response_entities.py create mode 100644 skyflow/generated/rest/types/vault_id.py create mode 100644 skyflow/utils/enums/detect_entities.py create mode 100644 skyflow/utils/enums/detect_output_transcriptions.py create mode 100644 skyflow/utils/enums/masking_method.py create mode 100644 skyflow/utils/enums/token_type.py create mode 100644 skyflow/vault/controller/_detect.py create mode 100644 
skyflow/vault/detect/__init__.py create mode 100644 skyflow/vault/detect/_audio_bleep.py create mode 100644 skyflow/vault/detect/_date_transformation.py create mode 100644 skyflow/vault/detect/_deidentify_file_request.py create mode 100644 skyflow/vault/detect/_deidentify_file_response.py create mode 100644 skyflow/vault/detect/_deidentify_text_request.py create mode 100644 skyflow/vault/detect/_deidentify_text_response.py create mode 100644 skyflow/vault/detect/_entity_info.py create mode 100644 skyflow/vault/detect/_get_detect_run_request.py create mode 100644 skyflow/vault/detect/_reidentify_text_request.py create mode 100644 skyflow/vault/detect/_reidentify_text_response.py create mode 100644 skyflow/vault/detect/_text_index.py create mode 100644 skyflow/vault/detect/_token_format.py create mode 100644 skyflow/vault/detect/_transformations.py create mode 100644 tests/vault/controller/test__detect.py diff --git a/.github/workflows/shared-tests.yml b/.github/workflows/shared-tests.yml index 2c54d2b7..0ab797d8 100644 --- a/.github/workflows/shared-tests.yml +++ b/.github/workflows/shared-tests.yml @@ -27,7 +27,7 @@ jobs: - name: 'Run Tests' run: | pip install -r requirements.txt - python -m coverage run --source=skyflow --omit=skyflow/generated/*,skyflow/utils/validations/*,skyflow/vault/data/*,skyflow/vault/tokens/*,skyflow/vault/connection/*,skyflow/error/*,skyflow/utils/enums/*,skyflow/vault/controller/_audit.py,skyflow/vault/controller/_bin_look_up.py -m unittest discover + python -m coverage run --source=skyflow --omit=skyflow/generated/*,skyflow/utils/validations/*,skyflow/vault/data/*,skyflow/vault/detect/*,skyflow/vault/tokens/*,skyflow/vault/connection/*,skyflow/error/*,skyflow/utils/enums/*,skyflow/vault/controller/_audit.py,skyflow/vault/controller/_bin_look_up.py -m unittest discover - name: coverage run: coverage xml -o test-coverage.xml diff --git a/README.md b/README.md index de6ce235..39a58429 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ The Skyflow Python SDK is designed to help with integrating Skyflow into a Pytho - [Authenticate](#authenticate) - [Initialize the client](#initialize-the-client) - [Insert data into the vault](#insert-data-into-the-vault) -- [Vault](#vault-apis) +- [Vault](#vault) - [Insert data into the vault](#insert-data-into-the-vault) - [Detokenize](#detokenize) - [Tokenize](#tokenize) @@ -30,8 +30,12 @@ The Skyflow Python SDK is designed to help with integrating Skyflow into a Pytho - [Redaction types](#redaction-types) - [Update](#update) - [Delete](#delete) - - [Invoke Connection](#invoke-connection) - [Query](#query) +- [Detect](#detect) + - [Deidentify Text](#deidentify-text) + - [Reidentify Text](#reidentify-text) + - [Deidentify File](#deidentify-file) + - [Get Detect Run](#get-detect-run) - [Connections](#connections) - [Invoke a connection](#invoke-a-connection) - [Authenticate with bearer tokens](#authenticate-with-bearer-tokens) @@ -1669,6 +1673,479 @@ QueryResponse( ) ``` +## Detect +Skyflow Detect enables you to deidentify and reidentify sensitive data in text and files, supporting advanced privacy-preserving workflows. The Detect API supports the following operations: + +### Deidentify Text +To deidentify text, use the `deidentify_text` method. The `DeidentifyTextRequest` class creates a deidentify text request, which includes the text to be deidentified and options for controlling the deidentification process. 
+ +#### Construct a Deidentify Text request + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import DetectEntities, TokenType +from skyflow.vault.detect import DeidentifyTextRequest, TokenFormat, Transformations +""" +This example demonstrates how to deidentify text, along with corresponding DeidentifyTextRequest schema. +""" +try: + # Initialize Skyflow client + # Step 1: Create request with text to deidentify + request = DeidentifyTextRequest( + text="", + entities=[DetectEntities.SSN, DetectEntities.CREDIT_CARD], # Entities to detect + token_format = TokenFormat( # Specify the token format for deidentified entities + default=TokenType.VAULT_TOKEN, + ), + transformations=Transformations( # Specify custom transformations for entities + shift_dates={ + "max_days": 30, + "min_days": 10, + "entities": [DetectEntities.DOB] + } + ), + allow_regex_list=[""], # Optional regex patterns to allow + restrict_regex_list=[""] # Optional regex patterns to restrict + ) + + # Step 2: Call deidentify_text + deidentify_text_response = skyflow_client.detect('').deidentify_text(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the deidentified text response + print('Response: ', deidentify_text_response) + + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +#### An example of Deidentify Text call + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import DetectEntities, TokenType +from skyflow.vault.detect import DeidentifyTextRequest, TokenFormat, Transformations +""" + * Skyflow Text De-identification Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create a deidentify text request with all available options + * 4. 
Handle response and errors +""" +try: + # Initialize Skyflow Client + # Step 1: Create request with sample text containing sensitive data + request = DeidentifyTextRequest( + text="My SSN is 123-45-6789 and my card is 4111 1111 1111 1111.", + entities=[ + DetectEntities.SSN, + DetectEntities.CREDIT_CARD + ], + token_format = TokenFormat( # Specify the token format for deidentified entities + default=TokenType.VAULT_TOKEN, + ), + transformations=Transformations( # Specify custom transformations for entities + shift_dates={ + "max_days": 30, + "min_days": 30, + "entities": [DetectEntities.DOB] + } + ) + ) + + # Step 2: Call deidentify_text + deidentify_text_response = skyflow_client.detect('').deidentify_text(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the deidentified text response + print('Response: ', deidentify_text_response) + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +Sample Response: +```python +DeidentifyTextResponse( + processed_text='My SSN is [SSN_VqLazzA] and my card is [CREDIT_CARD_54lAgtk].', + entities=[ + EntityInfo( + token='SSN_VqLazzA', + value='123-45-6789', + text_index=TextIndex(start=10, end=21), + processed_index=TextIndex(start=10, end=23), + entity='SSN', + scores={'SSN': 0.9383999705314636} + ), + EntityInfo( + token='CREDIT_CARD_54lAgtk', + value='4111 1111 1111 1111', + text_index=TextIndex(start=37, end=56), + processed_index=TextIndex(start=39, end=60), + entity='CREDIT_CARD', + scores={'CREDIT_CARD': 0.9050999879837036} + ) + ], + word_count=9, + char_count=57 +) +``` + +### Reidentify Text + +To reidentify text, use the `reidentify_text` method. The `ReidentifyTextRequest` class creates a reidentify text request, which includes the redacted or deidentified text to be reidentified. + +#### Construct a Reidentify Text request + +```python +from skyflow.error import SkyflowError +from skyflow.vault.detect import ReidentifyTextRequest, ReidentifyFormat +""" +This example demonstrates how to reidentify text, along with corresponding ReidentifyTextRequest schema. 
+""" +try: + # Initialize Skyflow client + # Step 1: Create request to reidentify + request = ReidentifyTextRequest( + text="", # Text containing tokens to reidentify + redacted_entities=[""], # Entities to show redacted + masked_entities=[""], # Entities to show masked + plain_text_entities=[""] # Entities to show as plain text + ) + + # Step 2: Call reidentify_text + reidentify_text_response = skyflow_client.detect('').reidentify_text(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the reidentified text response + print('Response: ', reidentify_text_response) + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +#### An example for Reidentify Text call + +```python +from skyflow.error import SkyflowError +from skyflow.vault.detect import ReidentifyTextRequest, ReidentifyFormat +from skyflow.utils.enums import DetectEntities +""" + * Skyflow Text Re-identification Example + * + * This example demonstrates how to: + * 1. Configure credentials + * 2. Set up vault configuration + * 3. Create a reidentify text request + * 4. Use all available options for reidentification + * 5. Handle response and errors +""" +try: + # Initialize Skyflow Client + # Step 1: Create request with deidentified text + request = ReidentifyTextRequest( + text="My SSN is [SSN_VqLazzA] and my card is [CREDIT_CARD_54lAgtk].", + ) + + # Step 2: Call reidentify_text + reidentify_text_response = skyflow_client.detect('').reidentify_text(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the reidentified text response + print('Response: ', reidentify_text_response) + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +Sample Response: +```python +ReidentifyTextResponse( + processed_text='My SSN is 123-45-6789 and my card is 4111 1111 1111 1111.' +) +``` + +### Deidentify File +To deidentify files, use the `deidentify_file` method. The `DeidentifyFileRequest` class creates a deidentify file request, which includes the file to be deidentified and various configuration options. + +#### Construct a Deidentify File request +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, Bleep +""" +This example demonstrates how to deidentify file, along with corresponding DeidentifyFileRequest schema. 
+""" +try: + # Initialize Skyflow client + # Step 1: Open file for deidentification + file = open('', 'rb') # Open the file in read-binary mode + # Step 2: Create deidentify file request + request = DeidentifyFileRequest( + file=file, # File object to deidentify + entities=[DetectEntities.SSN, DetectEntities.CREDIT_CARD], # Entities to detect + + # Token format configuration + token_format=TokenFormat( + default=True, + vault_token=[DetectEntities.SSN] + ), + + # Output configuration + output_directory='', # Output directory for saving the deidentified file + wait_time=15, # Max wait time in seconds (max 64) + + # Image-specific options + # output_processed_image=True, # Include processed image + # output_ocr_text=True, # Include OCR text + # masking_method=MaskingMethod.BLACKBOX, # Masking method + + # PDF-specific options + # pixel_density=1.5, # PDF processing density + # max_resolution=2000, # Max PDF resolution + + # Audio-specific options + # output_processed_audio=True, # Include processed audio + # output_transcription=DetectOutputTranscriptions.PLAINTEXT, # Transcription type + + # Audio bleep configuration + # bleep=Bleep( + # gain=5, # Loudness in dB + # frequency=1000, # Pitch in Hz + # start_padding=0.1, # Start padding in seconds + # stop_padding=0.2 # End padding in seconds + # ) + ) + + # Step 3: Call deidentify_file + deidentify_file_response = skyflow_client.detect('').deidentify_file(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the reidentified text response + print('Response: ', deidentify_file_response) + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +#### An example for Deidentify File call + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Bleep +""" + * Skyflow Deidentify File Example + * + * This sample demonstrates how to use all available options for deidentifying files. + * Supported file types: images (jpg, png, etc.), pdf, audio (mp3, wav), documents, + * spreadsheets, presentations, structured text. 
+""" +try: + # Initialize Skyflow client + # Step 1: Open file for deidentification + file = open('sensitive_document.txt', 'rb') # Open the file in read-binary mode + # Step 2: Create deidentify file request + request = DeidentifyFileRequest( + file=file, # File object to deidentify + entities=[ + DetectEntities.SSN, + DetectEntities.CREDIT_CARD + ], + # Token format configuration + token_format=TokenFormat( + default=True, + vault_token=[DetectEntities.SSN] + ), + output_directory="/tmp/processed", # Output directory for saving the deidentified file + wait_time=30, # Max wait time in seconds (max 64) + ) + + # Step 3: Call deidentify_file + deidentify_file_response = skyflow_client.detect('').deidentify_file(request) + # Replace with your actual Skyflow vault ID + + # Step 3: Print the reidentified text response + print('Response: ', deidentify_file_response) + +except SkyflowError as error: + # Step 4: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +Sample Response +```python +DeidentifyFileResponse( + file='TXkgY2FyZCBudW1iZXIgaXMgW0NSRURJVF9DQVJEXQpteSBzZWNvbmQ…', # Base64 encoded file content + type='redacted_file', + extension='txt', + word_count=19, + char_count=111, + size_in_kb=0.11, + duration_in_seconds=None, + page_count=None, + slide_count=None, + entities=[ + { + 'file': 'W3sicHJvY2Vzc2VleHQiOiJDUkVESVRfQ0FSRCIsInRleHQiOiIxMjM0NTY0Nzg5MDEyMzQ1NiIsImxvY2F0aW9uIjp7InN0dF9pZHgiOjE4LCJlbmRfaWR4IjozNSwic3R0X2lkeF9wcm9jZXNzZWR…', # Base64 encoded JSON string of entities + 'type': 'entities', + 'extension': 'json' + } + ], + run_id='83abcdef-2b61-4a83-a4e0-cbc71ffabffd', + status='SUCCESS', + errors=[] +) +``` + +### Get Detect Run +To retrieve the results of a previously started file deidentification operation, use the `get_detect_run` method. The `GetDetectRunRequest` class is initialized with the run_id returned from a prior `deidentify_file` call. + +#### Construct a Get Detect Run request + +```python +from skyflow.error import SkyflowError +from skyflow.vault.detect import GetDetectRunRequest + +""" +Example program to demonstrate get detect run using run id, along with corresponding GetDetectRunRequest schema. +""" + +try: + # Initialize Skyflow client + # Step 1: Create GetDetectRunRequest + request = GetDetectRunRequest( + run_id='' # Replace with runId from deidentify_file + ) + + # Step 2: Call get_detect_run + get_detect_run_response = skyflow_client.detect('').get_detect_run(request) + # Replace with your actual vault ID + + # Print the response from the get detect run operation + print('Response: ', get_detect_run_response) + +except SkyflowError as error: + # Step 3: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes + +``` + +#### An example for Get Detect Run Call + +```python +from skyflow.error import SkyflowError +from skyflow.vault.detect import GetDetectRunRequest +""" + * Skyflow Get Detect Run Example + * + * This example demonstrates how to: + * 1. Configure credentials + * 2. Set up vault configuration + * 3. Create a get detect run request + * 4. 
Call getDetectRun to poll for file processing results + * 5. Handle response and errors +""" +try: + # Initialize Skyflow client + # Step 1: Create GetDetectRunRequest + request = GetDetectRunRequest( + run_id="48ec05ba-96ec-4641-a8e2-35e066afef95" + ) + + # Step 2: Call get_detect_run + get_detect_run_response = skyflow_client.detect('').get_detect_run(request) + # Replace with your actual vault ID + + # Print the response from the get detect run operation + print('Response: ', get_detect_run_response) + +except SkyflowError as error: + # Step 3: Handle any exceptions that may occur during the insert operation + print('Skyflow Specific Error: ', { + 'code': error.http_code, + 'message': error.message, + 'details': error.details + }) +except Exception as error: + print('Unexpected Error:', error) # Print the stack trace for debugging purposes +``` + +Sample Response +```python +DeidentifyFileResponse( + file='TXkgY2FyZCBudW1iZXIgaXMgW0NSRURJVF9DQVJEXQpteSBzZWNvbmQ…', # Base64 encoded file content + type='redacted_file', + extension='txt', + word_count=19, + char_count=111, + size_in_kb=0.11, + duration_in_seconds=None, + page_count=None, + slide_count=None, + entities=[ + { + 'file': 'W3sicHJvY2Vzc2VleHQiOiJDUkVESVRfQ0FSRCIsInRleHQiOiIxMjM0NTY0Nzg5MDEyMzQ1NiIsImxvY2F0aW9uIjp7InN0dF9pZHgiOjE4LCJlbmRfaWR4IjozNSwic3R0X2lkeF9wcm9jZXNzZWR…', # Base64 encoded JSON string of entities + 'type': 'entities', + 'extension': 'json' + } + ], + run_id='48ec05ba-96ec-4641-a8e2-35e066afef95', + status='SUCCESS', + errors=[] +) +``` + ### Connections Skyflow Connections is a gateway service that uses tokenization to securely send and receive data between your systems and first- or third-party services. The [connections](https://github.com/skyflowapi/skyflow-python/tree/v2/skyflow/vault/connection) module invokes both inbound and/or outbound connections. diff --git a/samples/detect_api/deidentify_file.py b/samples/detect_api/deidentify_file.py new file mode 100644 index 00000000..c9877d58 --- /dev/null +++ b/samples/detect_api/deidentify_file.py @@ -0,0 +1,105 @@ +from skyflow.error import SkyflowError +from skyflow import Env, Skyflow, LogLevel +from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, DateTransformation, Bleep + +""" + * Skyflow Deidentify File Example + * + * This sample demonstrates how to use all available options for deidentifying files. + * Supported file types: images (jpg, png, etc.), pdf, audio (mp3, wav), documents, + * spreadsheets, presentations, structured text. 
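+ * Note: the credentials path, vault ID, cluster ID, and file path below are placeholders;
+ * replace them with real values and call perform_file_deidentification() to run this sample.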
+""" + +def perform_file_deidentification(): + try: + # Step 1: Configure Credentials + credentials = { + 'path': '/path/to/credentials.json' # Path to credentials file + } + + # Step 2: Configure Vault + vault_config = { + 'vault_id': '', # Replace with your vault ID + 'cluster_id': '', # Replace with your cluster ID + 'env': Env.PROD, # Deployment environment + 'credentials': credentials + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(vault_config) + .set_log_level(LogLevel.INFO) # Use LogLevel.ERROR in production + .build() + ) + + # Step 4: Create File Object + file_path = '' # Replace with your file path + file = open(file_path, 'rb') + # Step 5: Configure Deidentify File Request with all options + deidentify_request = DeidentifyFileRequest( + file=file, # File object to deidentify + entities=[DetectEntities.SSN, DetectEntities.CREDIT_CARD], # Entities to detect + allow_regex_list=[''], # Optional: Patterns to allow + restrict_regex_list=[''], # Optional: Patterns to restrict + + # Token format configuration + token_format=TokenFormat( + vault_token=[DetectEntities.SSN], # Use vault tokens for these entities + ), + + # Optional: Custom transformations + # transformations=Transformations( + # shift_dates=DateTransformation( + # max_days=30, + # min_days=10, + # entities=[DetectEntities.DOB] + # ) + # ), + + # Output configuration + output_directory='', # Where to save processed file + wait_time=15, # Max wait time in seconds (max 64) + + # Image-specific options + output_processed_image=True, # Include processed image in output + output_ocr_text=True, # Include OCR text in response + masking_method=MaskingMethod.BLACKBOX, # Masking method for images + + # PDF-specific options + pixel_density=15, # Pixel density for PDF processing + max_resolution=2000, # Max resolution for PDF + + # Audio-specific options + output_processed_audio=True, # Include processed audio + output_transcription=DetectOutputTranscriptions.PLAINTEXT_TRANSCRIPTION, # Transcription type + + # Audio bleep configuration + + # bleep=Bleep( + # gain=5, # Loudness in dB + # frequency=1000, # Pitch in Hz + # start_padding=0.1, # Padding at start (seconds) + # stop_padding=0.2 # Padding at end (seconds) + # ) + ) + + # Step 6: Call deidentifyFile API + response = skyflow_client.detect().deidentify_file(deidentify_request) + + # Handle Successful Response + print("\nDeidentify File Response:", response) + + except SkyflowError as error: + # Handle Skyflow-specific errors + print('\nSkyflow Error:', { + 'http_code': error.http_code, + 'grpc_code': error.grpc_code, + 'http_status': error.http_status, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + # Handle unexpected errors + print('Unexpected Error:', error) diff --git a/samples/detect_api/deidentify_text.py b/samples/detect_api/deidentify_text.py new file mode 100644 index 00000000..c543b6f8 --- /dev/null +++ b/samples/detect_api/deidentify_text.py @@ -0,0 +1,82 @@ +from skyflow.error import SkyflowError +from skyflow import Env, Skyflow, LogLevel +from skyflow.utils.enums import DetectEntities +from skyflow.vault.detect import DeidentifyTextRequest, TokenFormat, Transformations, DateTransformation + +""" + * Skyflow Text De-identification Example + * + * This example demonstrates how to: + * 1. Configure Skyflow client credentials + * 2. Set up vault configuration + * 3. Create a deidentify text request with all available options + * 4. 
Handle response and errors +""" + +def perform_text_deidentification(): + try: + # Step 1: Configure Credentials + credentials = { + 'path': '/path/to/credentials.json' # Path to credentials file + } + + # Step 2: Configure Vault + vault_config = { + 'vault_id': '', # Replace with your vault ID + 'cluster_id': '', # Replace with your cluster ID + 'env': Env.PROD, # Deployment environment + 'credentials': credentials + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(vault_config) + .set_log_level(LogLevel.ERROR) + .build() + ) + + # Step 4: Prepare Sample Text + sample_text = "My SSN is 123-45-6789 and my card is 4111 1111 1111 1111." + + # Step 5: Configure Token Format + token_format = TokenFormat( + vault_token=[DetectEntities.CREDIT_CARD, DetectEntities.SSN], # Use vault tokens for these entities + ) + + # Step 6: Configure Transformations + transformations = Transformations( + shift_dates=DateTransformation( + max_days=30, # Maximum days to shift + min_days=30, # Minimum days to shift + entities=[DetectEntities.DOB] # Apply shift to DOB entities + ) + ) + + # Step 7: Create Deidentify Request + deidentify_request = DeidentifyTextRequest( + text=sample_text, + entities=[DetectEntities.CREDIT_CARD, DetectEntities.SSN], # Entities to detect and deidentify + token_format=token_format, + transformations=transformations, + allow_regex_list=[''], # Optional: regex patterns to allow + restrict_regex_list=[''] # Optional: regex patterns to restrict + ) + + # Step 8: Perform Text Deidentification + response = skyflow_client.detect().deidentify_text(deidentify_request) + + # Handle Successful Response + print("\nDeidentify Text Response:", response) + + except SkyflowError as error: + # Handle Skyflow-specific errors + print('\nSkyflow Error:', { + 'http_code': error.http_code, + 'grpc_code': error.grpc_code, + 'http_status': error.http_status, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + print('Unexpected Error:', error) diff --git a/samples/detect_api/get_detect_run.py b/samples/detect_api/get_detect_run.py new file mode 100644 index 00000000..c2380c27 --- /dev/null +++ b/samples/detect_api/get_detect_run.py @@ -0,0 +1,61 @@ +from skyflow.error import SkyflowError +from skyflow import Env, Skyflow, LogLevel +from skyflow.vault.detect import GetDetectRunRequest + +""" + * Skyflow Get Detect Run Example + * + * This example demonstrates how to: + * 1. Configure credentials + * 2. Set up vault configuration + * 3. Create a get detect run request + * 4. Call getDetectRun to poll for file processing results + * 5. 
Handle response and errors +""" + +def perform_get_detect_run(): + try: + # Step 1: Configure Credentials + credentials = { + 'path': '/path/to/credentials.json' # Path to credentials file + } + + # Step 2: Configure Vault + vault_config = { + 'vault_id': '', # Replace with your vault ID + 'cluster_id': '', # Replace with your cluster ID + 'env': Env.PROD, # Deployment environment + 'credentials': credentials + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(vault_config) + .set_log_level(LogLevel.INFO) # Use LogLevel.ERROR in production + .build() + ) + + # Step 4: Create GetDetectRunRequest + get_detect_run_request = GetDetectRunRequest( + run_id='' # Replace with the runId from deidentifyFile call + ) + + # Step 5: Call getDetectRun API + response = skyflow_client.detect().get_detect_run(get_detect_run_request) + + # Handle Successful Response + print("\nGet Detect Run Response:", response) + + except SkyflowError as error: + # Handle Skyflow-specific errors + print('\nSkyflow Error:', { + 'http_code': error.http_code, + 'grpc_code': error.grpc_code, + 'http_status': error.http_status, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + # Handle unexpected errors + print('Unexpected Error:', error) diff --git a/samples/detect_api/reidentify_text.py b/samples/detect_api/reidentify_text.py new file mode 100644 index 00000000..d158733f --- /dev/null +++ b/samples/detect_api/reidentify_text.py @@ -0,0 +1,66 @@ +from skyflow.error import SkyflowError +from skyflow import Env, Skyflow, LogLevel +from skyflow.utils.enums import DetectEntities +from skyflow.vault.detect import ReidentifyTextRequest + +""" + * Skyflow Text Re-identification Example + * + * This example demonstrates how to: + * 1. Configure credentials + * 2. Set up vault configuration + * 3. Create a reidentify text request + * 4. Use all available options for reidentification + * 5. 
Handle response and errors +""" + +def perform_text_reidentification(): + try: + # Step 1: Configure Credentials + credentials = { + 'path': '/path/to/credentials.json' # Path to credentials file + } + + # Step 2: Configure Vault + vault_config = { + 'vault_id': '', # Replace with your vault ID + 'cluster_id': '', # Replace with your cluster ID + 'env': Env.PROD, # Deployment environment + 'credentials': credentials + } + + # Step 3: Configure & Initialize Skyflow Client + skyflow_client = ( + Skyflow.builder() + .add_vault_config(vault_config) + .set_log_level(LogLevel.ERROR) + .build() + ) + + # Step 4: Prepare Sample Redacted Text + redacted_text = "" # Replace with your redacted text + + # Step 5: Create Reidentify Request + reidentify_request = ReidentifyTextRequest( + text=redacted_text, + plain_text_entities=[DetectEntities.PHONE_NUMBER] + ) + + # Step 6: Perform Text Reidentification + response = skyflow_client.detect().reidentify_text(reidentify_request) + + # Step 7: Handle Successful Response + print("\nReidentify Text Response:", response) + + except SkyflowError as error: + # Handle Skyflow-specific errors + print('\nSkyflow Error:', { + 'http_code': error.http_code, + 'grpc_code': error.grpc_code, + 'http_status': error.http_status, + 'message': error.message, + 'details': error.details + }) + except Exception as error: + # Handle unexpected errors + print('Unexpected Error:', error) diff --git a/setup.py b/setup.py index 09cea753..8c09ec2e 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b3' +current_version = '2.0.0b6' setup( name='skyflow', diff --git a/skyflow/client/skyflow.py b/skyflow/client/skyflow.py index 1c87bcaa..9f0d9dbf 100644 --- a/skyflow/client/skyflow.py +++ b/skyflow/client/skyflow.py @@ -8,6 +8,7 @@ from skyflow.vault.client.client import VaultClient from skyflow.vault.controller import Vault from skyflow.vault.controller import Connection +from skyflow.vault.controller import Detect class Skyflow: def __init__(self, builder): @@ -65,11 +66,15 @@ def update_log_level(self, log_level): def vault(self, vault_id = None) -> Vault: vault_config = self.__builder.get_vault_config(vault_id) - return vault_config.get("controller") + return vault_config.get("vault_controller") def connection(self, connection_id = None) -> Connection: connection_config = self.__builder.get_connection_config(connection_id) return connection_config.get("controller") + + def detect(self, vault_id = None) -> Detect: + vault_config = self.__builder.get_vault_config(vault_id) + return vault_config.get("detect_controller") class Builder: def __init__(self): @@ -182,9 +187,11 @@ def __add_vault_config(self, config): vault_client = VaultClient(config) self.__vault_configs[vault_id] = { "vault_client": vault_client, - "controller": Vault(vault_client) + "vault_controller": Vault(vault_client), + "detect_controller": Detect(vault_client) } log_info(SkyflowMessages.Info.VAULT_CONTROLLER_INITIALIZED.value.format(config.get("vault_id")), self.__logger) + log_info(SkyflowMessages.Info.DETECT_CONTROLLER_INITIALIZED.value.format(config.get("vault_id")), self.__logger) def __add_connection_config(self, config): validate_connection_config(self.__logger, config) diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index af42a8fa..9ff683cb 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -3,6 +3,11 @@ # isort: skip_file 
from .types import ( + AdvancedOptionsColumnMapping, + AdvancedOptionsEntityColumnMap, + AdvancedOptionsVaultSchema, + AllowRegex, + AudioConfigTranscriptionType, AuditEventAuditResourceType, AuditEventContext, AuditEventData, @@ -10,11 +15,45 @@ BatchRecordMethod, ContextAccessType, ContextAuthMode, + DeidentifyFileOutput, + DeidentifyFileOutputProcessedFileType, + DeidentifyFileResponse, + DeidentifyStatusResponse, + DeidentifyStatusResponseOutputType, + DeidentifyStatusResponseStatus, + DeidentifyStatusResponseWordCharacterCount, + DeidentifyStringResponse, + DetectDataAccuracy, + DetectDataEntities, + DetectFileRequestDataType, + DetectRequestDeidentifyOption, + DetectedEntity, DetokenizeRecordResponseValueType, + EntityLocation, + EntityType, + EntityTypes, + ErrorResponse, + ErrorResponseError, + ErrorString, GooglerpcStatus, + ProcessedFileOutputProcessedFileType, ProtobufAny, RedactionEnumRedaction, + ReidentifyStringResponse, RequestActionType, + ResourceId, + RestrictRegex, + TokenType, + TokenTypeDefault, + TokenTypeWithoutVault, + TokenTypeWithoutVaultDefault, + Transformations, + TransformationsShiftDates, + TransformationsShiftDatesEntityTypesItem, + Uuid, + V1AdvancedOptions, + V1AudioConfig, + V1AudioOptions, V1AuditAfterOptions, V1AuditEventResponse, V1AuditResponse, @@ -29,26 +68,39 @@ V1Card, V1DeleteFileResponse, V1DeleteRecordResponse, + V1DetectFileResponse, + V1DetectStatusResponse, + V1DetectStatusResponseStatus, + V1DetectTextRequest, + V1DetectTextResponse, V1DetokenizeRecordRequest, V1DetokenizeRecordResponse, V1DetokenizeResponse, V1FieldRecords, V1FileAvScanStatus, + V1FileDataFormat, V1GetAuthTokenResponse, V1GetFileScanStatusResponse, V1GetQueryResponse, + V1ImageOptions, V1InsertRecordResponse, + V1Locations, V1MemberType, + V1PdfConfig, + V1PdfOptions, + V1ProcessedFileOutput, V1RecordMetaProperties, + V1ResponseEntities, V1TokenizeRecordRequest, V1TokenizeRecordResponse, V1TokenizeResponse, V1UpdateRecordResponse, V1VaultFieldMapping, V1VaultSchemaConfig, + VaultId, ) -from .errors import BadRequestError, NotFoundError, UnauthorizedError -from . import audit, authentication, bin_lookup, query, records, tokens +from .errors import BadRequestError, InternalServerError, NotFoundError, UnauthorizedError +from . 
import audit, authentication, bin_lookup, deprecated, files, query, records, strings, tokens from .audit import ( AuditServiceListAuditEventsRequestFilterOpsActionType, AuditServiceListAuditEventsRequestFilterOpsContextAccessType, @@ -59,15 +111,41 @@ ) from .client import AsyncSkyflow, Skyflow from .environment import SkyflowEnvironment +from .files import ( + DeidentifyAudioRequestFile, + DeidentifyAudioRequestFileDataFormat, + DeidentifyAudioRequestOutputTranscription, + DeidentifyDocumentRequestFile, + DeidentifyDocumentRequestFileDataFormat, + DeidentifyFileRequestFile, + DeidentifyFileRequestFileDataFormat, + DeidentifyImageRequestFile, + DeidentifyImageRequestFileDataFormat, + DeidentifyImageRequestMaskingMethod, + DeidentifyPdfRequestFile, + DeidentifyPresentationRequestFile, + DeidentifyPresentationRequestFileDataFormat, + DeidentifySpreadsheetRequestFile, + DeidentifySpreadsheetRequestFileDataFormat, + DeidentifyStructuredTextRequestFile, + DeidentifyStructuredTextRequestFileDataFormat, + DeidentifyTextRequestFile, +) from .records import ( RecordServiceBulkGetRecordRequestOrderBy, RecordServiceBulkGetRecordRequestRedaction, RecordServiceGetRecordRequestRedaction, ) +from .strings import ReidentifyStringRequestFormat from .version import __version__ __all__ = [ + "AdvancedOptionsColumnMapping", + "AdvancedOptionsEntityColumnMap", + "AdvancedOptionsVaultSchema", + "AllowRegex", "AsyncSkyflow", + "AudioConfigTranscriptionType", "AuditEventAuditResourceType", "AuditEventContext", "AuditEventData", @@ -82,18 +160,72 @@ "BatchRecordMethod", "ContextAccessType", "ContextAuthMode", + "DeidentifyAudioRequestFile", + "DeidentifyAudioRequestFileDataFormat", + "DeidentifyAudioRequestOutputTranscription", + "DeidentifyDocumentRequestFile", + "DeidentifyDocumentRequestFileDataFormat", + "DeidentifyFileOutput", + "DeidentifyFileOutputProcessedFileType", + "DeidentifyFileRequestFile", + "DeidentifyFileRequestFileDataFormat", + "DeidentifyFileResponse", + "DeidentifyImageRequestFile", + "DeidentifyImageRequestFileDataFormat", + "DeidentifyImageRequestMaskingMethod", + "DeidentifyPdfRequestFile", + "DeidentifyPresentationRequestFile", + "DeidentifyPresentationRequestFileDataFormat", + "DeidentifySpreadsheetRequestFile", + "DeidentifySpreadsheetRequestFileDataFormat", + "DeidentifyStatusResponse", + "DeidentifyStatusResponseOutputType", + "DeidentifyStatusResponseStatus", + "DeidentifyStatusResponseWordCharacterCount", + "DeidentifyStringResponse", + "DeidentifyStructuredTextRequestFile", + "DeidentifyStructuredTextRequestFileDataFormat", + "DeidentifyTextRequestFile", + "DetectDataAccuracy", + "DetectDataEntities", + "DetectFileRequestDataType", + "DetectRequestDeidentifyOption", + "DetectedEntity", "DetokenizeRecordResponseValueType", + "EntityLocation", + "EntityType", + "EntityTypes", + "ErrorResponse", + "ErrorResponseError", + "ErrorString", "GooglerpcStatus", + "InternalServerError", "NotFoundError", + "ProcessedFileOutputProcessedFileType", "ProtobufAny", "RecordServiceBulkGetRecordRequestOrderBy", "RecordServiceBulkGetRecordRequestRedaction", "RecordServiceGetRecordRequestRedaction", "RedactionEnumRedaction", + "ReidentifyStringRequestFormat", + "ReidentifyStringResponse", "RequestActionType", + "ResourceId", + "RestrictRegex", "Skyflow", "SkyflowEnvironment", + "TokenType", + "TokenTypeDefault", + "TokenTypeWithoutVault", + "TokenTypeWithoutVaultDefault", + "Transformations", + "TransformationsShiftDates", + "TransformationsShiftDatesEntityTypesItem", "UnauthorizedError", + "Uuid", + 
"V1AdvancedOptions", + "V1AudioConfig", + "V1AudioOptions", "V1AuditAfterOptions", "V1AuditEventResponse", "V1AuditResponse", @@ -108,28 +240,44 @@ "V1Card", "V1DeleteFileResponse", "V1DeleteRecordResponse", + "V1DetectFileResponse", + "V1DetectStatusResponse", + "V1DetectStatusResponseStatus", + "V1DetectTextRequest", + "V1DetectTextResponse", "V1DetokenizeRecordRequest", "V1DetokenizeRecordResponse", "V1DetokenizeResponse", "V1FieldRecords", "V1FileAvScanStatus", + "V1FileDataFormat", "V1GetAuthTokenResponse", "V1GetFileScanStatusResponse", "V1GetQueryResponse", + "V1ImageOptions", "V1InsertRecordResponse", + "V1Locations", "V1MemberType", + "V1PdfConfig", + "V1PdfOptions", + "V1ProcessedFileOutput", "V1RecordMetaProperties", + "V1ResponseEntities", "V1TokenizeRecordRequest", "V1TokenizeRecordResponse", "V1TokenizeResponse", "V1UpdateRecordResponse", "V1VaultFieldMapping", "V1VaultSchemaConfig", + "VaultId", "__version__", "audit", "authentication", "bin_lookup", + "deprecated", + "files", "query", "records", + "strings", "tokens", ] diff --git a/skyflow/generated/rest/audit/client.py b/skyflow/generated/rest/audit/client.py index 7e22b077..34d589d1 100644 --- a/skyflow/generated/rest/audit/client.py +++ b/skyflow/generated/rest/audit/client.py @@ -200,8 +200,13 @@ def audit_service_list_audit_events( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.audit.audit_service_list_audit_events(filter_ops_account_id='filterOps.accountID', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.audit.audit_service_list_audit_events( + filter_ops_account_id="filterOps.accountID", + ) """ _response = self._raw_client.audit_service_list_audit_events( filter_ops_account_id=filter_ops_account_id, @@ -415,11 +420,21 @@ async def audit_service_list_audit_events( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.audit.audit_service_list_audit_events(filter_ops_account_id='filterOps.accountID', ) + await client.audit.audit_service_list_audit_events( + filter_ops_account_id="filterOps.accountID", + ) + + asyncio.run(main()) """ _response = await self._raw_client.audit_service_list_audit_events( diff --git a/skyflow/generated/rest/audit/raw_client.py b/skyflow/generated/rest/audit/raw_client.py index b67b025e..3d1277bf 100644 --- a/skyflow/generated/rest/audit/raw_client.py +++ b/skyflow/generated/rest/audit/raw_client.py @@ -244,9 +244,9 @@ def audit_service_list_audit_events( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -471,9 +471,9 @@ async def audit_service_list_audit_events( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/authentication/client.py b/skyflow/generated/rest/authentication/client.py index 81408a26..2f1e804e 100644 --- a/skyflow/generated/rest/authentication/client.py 
+++ b/skyflow/generated/rest/authentication/client.py @@ -71,8 +71,14 @@ def authentication_service_get_auth_token( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.authentication.authentication_service_get_auth_token(grant_type='urn:ietf:params:oauth:grant-type:jwt-bearer', assertion='eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.authentication.authentication_service_get_auth_token( + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", + ) """ _response = self._raw_client.authentication_service_get_auth_token( grant_type=grant_type, @@ -145,11 +151,22 @@ async def authentication_service_get_auth_token( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.authentication.authentication_service_get_auth_token(grant_type='urn:ietf:params:oauth:grant-type:jwt-bearer', assertion='eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3', ) + await client.authentication.authentication_service_get_auth_token( + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion="eyLhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaXNzIjoiY29tcGFueSIsImV4cCI6MTYxNTE5MzgwNywiaWF0IjoxNjE1MTY1MDQwLCJhdWQiOiKzb21lYXVkaWVuY2UifQ.4pcPyMDQ9o1PSyXnrXCjTwXyr4BSezdI1AVTmud2fU3", + ) + + asyncio.run(main()) """ _response = await self._raw_client.authentication_service_get_auth_token( diff --git a/skyflow/generated/rest/authentication/raw_client.py b/skyflow/generated/rest/authentication/raw_client.py index bb1c2ed7..ad2caef6 100644 --- a/skyflow/generated/rest/authentication/raw_client.py +++ b/skyflow/generated/rest/authentication/raw_client.py @@ -94,9 +94,9 @@ def authentication_service_get_auth_token( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -105,9 +105,9 @@ def authentication_service_get_auth_token( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -116,9 +116,9 @@ def authentication_service_get_auth_token( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # 
type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -206,9 +206,9 @@ async def authentication_service_get_auth_token( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -217,9 +217,9 @@ async def authentication_service_get_auth_token( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -228,9 +228,9 @@ async def authentication_service_get_auth_token( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/bin_lookup/client.py b/skyflow/generated/rest/bin_lookup/client.py index a217ae60..fe8c892a 100644 --- a/skyflow/generated/rest/bin_lookup/client.py +++ b/skyflow/generated/rest/bin_lookup/client.py @@ -63,8 +63,13 @@ def bin_list_service_list_cards_of_bin( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.bin_lookup.bin_list_service_list_cards_of_bin(bin='012345', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.bin_lookup.bin_list_service_list_cards_of_bin( + bin="012345", + ) """ _response = self._raw_client.bin_list_service_list_cards_of_bin( fields=fields, @@ -126,11 +131,21 @@ async def bin_list_service_list_cards_of_bin( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.bin_lookup.bin_list_service_list_cards_of_bin(bin='012345', ) + await client.bin_lookup.bin_list_service_list_cards_of_bin( + bin="012345", + ) + + asyncio.run(main()) """ _response = await self._raw_client.bin_list_service_list_cards_of_bin( diff --git a/skyflow/generated/rest/bin_lookup/raw_client.py b/skyflow/generated/rest/bin_lookup/raw_client.py index 90202931..b82f4c84 100644 --- a/skyflow/generated/rest/bin_lookup/raw_client.py +++ b/skyflow/generated/rest/bin_lookup/raw_client.py @@ -85,9 +85,9 @@ def bin_list_service_list_cards_of_bin( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -166,9 +166,9 @@ async def bin_list_service_list_cards_of_bin( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/client.py 
b/skyflow/generated/rest/client.py index 3dab76fb..315d1f86 100644 --- a/skyflow/generated/rest/client.py +++ b/skyflow/generated/rest/client.py @@ -7,9 +7,12 @@ from .authentication.client import AsyncAuthenticationClient, AuthenticationClient from .bin_lookup.client import AsyncBinLookupClient, BinLookupClient from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .deprecated.client import AsyncDeprecatedClient, DeprecatedClient from .environment import SkyflowEnvironment +from .files.client import AsyncFilesClient, FilesClient from .query.client import AsyncQueryClient, QueryClient from .records.client import AsyncRecordsClient, RecordsClient +from .strings.client import AsyncStringsClient, StringsClient from .tokens.client import AsyncTokensClient, TokensClient @@ -25,6 +28,8 @@ class Skyflow: environment : SkyflowEnvironment The environment to use for requests from the client. from .environment import SkyflowEnvironment + + Defaults to SkyflowEnvironment.PRODUCTION @@ -42,7 +47,10 @@ class Skyflow: Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) + + client = Skyflow( + token="YOUR_TOKEN", + ) """ def __init__( @@ -74,6 +82,9 @@ def __init__( self.tokens = TokensClient(client_wrapper=self._client_wrapper) self.query = QueryClient(client_wrapper=self._client_wrapper) self.authentication = AuthenticationClient(client_wrapper=self._client_wrapper) + self.deprecated = DeprecatedClient(client_wrapper=self._client_wrapper) + self.strings = StringsClient(client_wrapper=self._client_wrapper) + self.files = FilesClient(client_wrapper=self._client_wrapper) class AsyncSkyflow: @@ -88,6 +99,8 @@ class AsyncSkyflow: environment : SkyflowEnvironment The environment to use for requests from the client. from .environment import SkyflowEnvironment + + Defaults to SkyflowEnvironment.PRODUCTION @@ -105,7 +118,10 @@ class AsyncSkyflow: Examples -------- from skyflow import AsyncSkyflow - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) """ def __init__( @@ -137,6 +153,9 @@ def __init__( self.tokens = AsyncTokensClient(client_wrapper=self._client_wrapper) self.query = AsyncQueryClient(client_wrapper=self._client_wrapper) self.authentication = AsyncAuthenticationClient(client_wrapper=self._client_wrapper) + self.deprecated = AsyncDeprecatedClient(client_wrapper=self._client_wrapper) + self.strings = AsyncStringsClient(client_wrapper=self._client_wrapper) + self.files = AsyncFilesClient(client_wrapper=self._client_wrapper) def _get_base_url(*, base_url: typing.Optional[str] = None, environment: SkyflowEnvironment) -> str: diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index b1396aef..aa31aea3 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -22,7 +22,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "skyflow.generated.rest", - "X-Fern-SDK-Version": "0.0.166", + "X-Fern-SDK-Version": "0.0.209", } headers["Authorization"] = f"Bearer {self._get_token()}" return headers diff --git a/skyflow/generated/rest/core/force_multipart.py b/skyflow/generated/rest/core/force_multipart.py new file mode 100644 index 00000000..ae24ccff --- /dev/null +++ b/skyflow/generated/rest/core/force_multipart.py @@ -0,0 +1,16 @@ +# This file was auto-generated by Fern from our API Definition. 
+ + +class ForceMultipartDict(dict): + """ + A dictionary subclass that always evaluates to True in boolean contexts. + + This is used to force multipart/form-data encoding in HTTP requests even when + the dictionary is empty, which would normally evaluate to False. + """ + + def __bool__(self): + return True + + +FORCE_MULTIPART = ForceMultipartDict() diff --git a/skyflow/generated/rest/core/http_client.py b/skyflow/generated/rest/core/http_client.py index e7bd4f79..e4173f99 100644 --- a/skyflow/generated/rest/core/http_client.py +++ b/skyflow/generated/rest/core/http_client.py @@ -11,10 +11,12 @@ import httpx from .file import File, convert_file_dict_to_httpx_tuples +from .force_multipart import FORCE_MULTIPART from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from httpx._types import RequestFiles INITIAL_RETRY_DELAY_SECONDS = 0.5 MAX_RETRY_DELAY_SECONDS = 10 @@ -178,11 +180,17 @@ def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -193,6 +201,15 @@ def request( json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -225,11 +242,7 @@ def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) @@ -264,11 +277,17 @@ def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.Iterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -277,6 +296,15 @@ 
def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) with self.httpx_client.stream( @@ -311,11 +339,7 @@ def stream( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream @@ -354,11 +378,17 @@ async def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -367,6 +397,15 @@ async def request( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) # Add the input to each of these and do None-safety checks @@ -402,11 +441,7 @@ async def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) @@ -440,11 +475,17 @@ async def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.AsyncIterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -453,6 +494,15 @@ async def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + 
convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) async with self.httpx_client.stream( @@ -487,11 +537,7 @@ async def stream( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream diff --git a/skyflow/generated/rest/core/pydantic_utilities.py b/skyflow/generated/rest/core/pydantic_utilities.py index 60a2c713..0360ef49 100644 --- a/skyflow/generated/rest/core/pydantic_utilities.py +++ b/skyflow/generated/rest/core/pydantic_utilities.py @@ -181,7 +181,7 @@ def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any if IS_PYDANTIC_V2: - class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[name-defined, type-arg] + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] pass UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] diff --git a/skyflow/generated/rest/deprecated/__init__.py b/skyflow/generated/rest/deprecated/__init__.py new file mode 100644 index 00000000..5cde0202 --- /dev/null +++ b/skyflow/generated/rest/deprecated/__init__.py @@ -0,0 +1,4 @@ +# This file was auto-generated by Fern from our API Definition. + +# isort: skip_file + diff --git a/skyflow/generated/rest/deprecated/client.py b/skyflow/generated/rest/deprecated/client.py new file mode 100644 index 00000000..bd1cc88c --- /dev/null +++ b/skyflow/generated/rest/deprecated/client.py @@ -0,0 +1,598 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.detect_data_accuracy import DetectDataAccuracy +from ..types.detect_data_entities import DetectDataEntities +from ..types.detect_file_request_data_type import DetectFileRequestDataType +from ..types.detect_request_deidentify_option import DetectRequestDeidentifyOption +from ..types.v_1_advanced_options import V1AdvancedOptions +from ..types.v_1_audio_config import V1AudioConfig +from ..types.v_1_detect_file_response import V1DetectFileResponse +from ..types.v_1_detect_status_response import V1DetectStatusResponse +from ..types.v_1_detect_text_response import V1DetectTextResponse +from ..types.v_1_file_data_format import V1FileDataFormat +from ..types.v_1_image_options import V1ImageOptions +from ..types.v_1_pdf_config import V1PdfConfig +from .raw_client import AsyncRawDeprecatedClient, RawDeprecatedClient + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class DeprecatedClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._raw_client = RawDeprecatedClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawDeprecatedClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawDeprecatedClient + """ + return self._raw_client + + def detect_service_detect_file_input( + self, + *, + file: str, + data_format: V1FileDataFormat, + input_type: DetectFileRequestDataType, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + audio: typing.Optional[V1AudioConfig] = OMIT, + image: typing.Optional[V1ImageOptions] = OMIT, + pdf: typing.Optional[V1PdfConfig] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetectFileResponse: + """ + Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. + + Parameters + ---------- + file : str + Path of the file or base64-encoded data that has to be processed. + + data_format : V1FileDataFormat + + input_type : DetectFileRequestDataType + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + audio : typing.Optional[V1AudioConfig] + + image : typing.Optional[V1ImageOptions] + + pdf : typing.Optional[V1PdfConfig] + + advanced_options : typing.Optional[V1AdvancedOptions] + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow, V1AudioConfig + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.deprecated.detect_service_detect_file_input( + file="fkdjfhdlnnggtsjj...", + data_format="mp3", + input_type="BASE64", + vault_id="a372f752689c9bfc8ca3d4dba", + restrict_entity_types=[ + "name", + "age", + "location", + "ssn", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "numerical_pii", + "phone_number", + "medical_code", + "account_number", + "gender_sexuality", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + ], + return_entities=True, + accuracy="high_multilingual", + audio=V1AudioConfig( + output_transcription="none", + output_processed_audio=False, + ), + ) + """ + _response = self._raw_client.detect_service_detect_file_input( + file=file, + data_format=data_format, + input_type=input_type, + vault_id=vault_id, + session_id=session_id, + restrict_entity_types=restrict_entity_types, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + return_entities=return_entities, + accuracy=accuracy, + audio=audio, + image=image, + pdf=pdf, + advanced_options=advanced_options, + deidentify_token_format=deidentify_token_format, + request_options=request_options, + ) + return _response.data + + def detect_service_detect_status( + self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> V1DetectStatusResponse: + """ + Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. + + Parameters + ---------- + id : str + ID of the deidentification request. + + vault_id : typing.Optional[str] + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectStatusResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.deprecated.detect_service_detect_status( + id="ID", + ) + """ + _response = self._raw_client.detect_service_detect_status( + id, vault_id=vault_id, request_options=request_options + ) + return _response.data + + def detect_service_detect_text( + self, + *, + text: str, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + store_entities: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetectTextResponse: + """ + Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. + + Parameters + ---------- + text : str + Data to deidentify. + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + advanced_options : typing.Optional[V1AdvancedOptions] + + store_entities : typing.Optional[bool] + Indicates whether entities should be stored in the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectTextResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.deprecated.detect_service_detect_text( + text="text", + vault_id="c848741aefb74bf38780da5399a76507", + ) + """ + _response = self._raw_client.detect_service_detect_text( + text=text, + vault_id=vault_id, + session_id=session_id, + restrict_entity_types=restrict_entity_types, + deidentify_token_format=deidentify_token_format, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + return_entities=return_entities, + accuracy=accuracy, + advanced_options=advanced_options, + store_entities=store_entities, + request_options=request_options, + ) + return _response.data + + +class AsyncDeprecatedClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._raw_client = AsyncRawDeprecatedClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawDeprecatedClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawDeprecatedClient + """ + return self._raw_client + + async def detect_service_detect_file_input( + self, + *, + file: str, + data_format: V1FileDataFormat, + input_type: DetectFileRequestDataType, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + audio: typing.Optional[V1AudioConfig] = OMIT, + image: typing.Optional[V1ImageOptions] = OMIT, + pdf: typing.Optional[V1PdfConfig] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetectFileResponse: + """ + Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. + + Parameters + ---------- + file : str + Path of the file or base64-encoded data that has to be processed. + + data_format : V1FileDataFormat + + input_type : DetectFileRequestDataType + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + audio : typing.Optional[V1AudioConfig] + + image : typing.Optional[V1ImageOptions] + + pdf : typing.Optional[V1PdfConfig] + + advanced_options : typing.Optional[V1AdvancedOptions] + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow, V1AudioConfig + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.deprecated.detect_service_detect_file_input( + file="fkdjfhdlnnggtsjj...", + data_format="mp3", + input_type="BASE64", + vault_id="a372f752689c9bfc8ca3d4dba", + restrict_entity_types=[ + "name", + "age", + "location", + "ssn", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "numerical_pii", + "phone_number", + "medical_code", + "account_number", + "gender_sexuality", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + ], + return_entities=True, + accuracy="high_multilingual", + audio=V1AudioConfig( + output_transcription="none", + output_processed_audio=False, + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.detect_service_detect_file_input( + file=file, + data_format=data_format, + input_type=input_type, + vault_id=vault_id, + session_id=session_id, + restrict_entity_types=restrict_entity_types, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + return_entities=return_entities, + accuracy=accuracy, + audio=audio, + image=image, + pdf=pdf, + advanced_options=advanced_options, + deidentify_token_format=deidentify_token_format, + request_options=request_options, + ) + return _response.data + + async def detect_service_detect_status( + self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> V1DetectStatusResponse: + """ + Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. + + Parameters + ---------- + id : str + ID of the deidentification request. + + vault_id : typing.Optional[str] + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectStatusResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.deprecated.detect_service_detect_status( + id="ID", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.detect_service_detect_status( + id, vault_id=vault_id, request_options=request_options + ) + return _response.data + + async def detect_service_detect_text( + self, + *, + text: str, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + store_entities: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> V1DetectTextResponse: + """ + Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. + + Parameters + ---------- + text : str + Data to deidentify. + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + advanced_options : typing.Optional[V1AdvancedOptions] + + store_entities : typing.Optional[bool] + Indicates whether entities should be stored in the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + V1DetectTextResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.deprecated.detect_service_detect_text( + text="text", + vault_id="c848741aefb74bf38780da5399a76507", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.detect_service_detect_text( + text=text, + vault_id=vault_id, + session_id=session_id, + restrict_entity_types=restrict_entity_types, + deidentify_token_format=deidentify_token_format, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + return_entities=return_entities, + accuracy=accuracy, + advanced_options=advanced_options, + store_entities=store_entities, + request_options=request_options, + ) + return _response.data diff --git a/skyflow/generated/rest/deprecated/raw_client.py b/skyflow/generated/rest/deprecated/raw_client.py new file mode 100644 index 00000000..11dd7ef1 --- /dev/null +++ b/skyflow/generated/rest/deprecated/raw_client.py @@ -0,0 +1,624 @@ +# This file was auto-generated by Fern from our API Definition. 
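The wrapper clients above delegate to the raw clients defined in the module that follows; the `with_raw_response` property exposes that layer when a caller wants the parsed payload together with HTTP-level handling. A minimal sketch of that pattern, assuming `NotFoundError` is importable from `skyflow.generated.rest.errors` as exported later in this patch and that the returned wrapper exposes the parsed body on `.data` as the generated code above does (the text and vault ID are placeholders):

from skyflow import Skyflow
from skyflow.generated.rest.errors import NotFoundError

client = Skyflow(
    token="YOUR_TOKEN",
)

try:
    # The raw layer returns an HttpResponse wrapper; .data carries the
    # parsed V1DetectTextResponse, just like the plain client method.
    raw = client.deprecated.with_raw_response.detect_service_detect_text(
        text="text",
        vault_id="c848741aefb74bf38780da5399a76507",
    )
    print(raw.data)
except NotFoundError as error:
    # A 404 is raised as a typed error rather than returned.
    print(error)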
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.not_found_error import NotFoundError +from ..types.detect_data_accuracy import DetectDataAccuracy +from ..types.detect_data_entities import DetectDataEntities +from ..types.detect_file_request_data_type import DetectFileRequestDataType +from ..types.detect_request_deidentify_option import DetectRequestDeidentifyOption +from ..types.v_1_advanced_options import V1AdvancedOptions +from ..types.v_1_audio_config import V1AudioConfig +from ..types.v_1_detect_file_response import V1DetectFileResponse +from ..types.v_1_detect_status_response import V1DetectStatusResponse +from ..types.v_1_detect_text_response import V1DetectTextResponse +from ..types.v_1_file_data_format import V1FileDataFormat +from ..types.v_1_image_options import V1ImageOptions +from ..types.v_1_pdf_config import V1PdfConfig + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawDeprecatedClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def detect_service_detect_file_input( + self, + *, + file: str, + data_format: V1FileDataFormat, + input_type: DetectFileRequestDataType, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + audio: typing.Optional[V1AudioConfig] = OMIT, + image: typing.Optional[V1ImageOptions] = OMIT, + pdf: typing.Optional[V1PdfConfig] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1DetectFileResponse]: + """ + Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. + + Parameters + ---------- + file : str + Path of the file or base64-encoded data that has to be processed. + + data_format : V1FileDataFormat + + input_type : DetectFileRequestDataType + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + audio : typing.Optional[V1AudioConfig] + + image : typing.Optional[V1ImageOptions] + + pdf : typing.Optional[V1PdfConfig] + + advanced_options : typing.Optional[V1AdvancedOptions] + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DetectFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/file", + method="POST", + json={ + "file": file, + "data_format": data_format, + "input_type": input_type, + "vault_id": vault_id, + "session_id": session_id, + "restrict_entity_types": restrict_entity_types, + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "return_entities": return_entities, + "accuracy": accuracy, + "audio": convert_and_respect_annotation_metadata( + object_=audio, annotation=V1AudioConfig, direction="write" + ), + "image": convert_and_respect_annotation_metadata( + object_=image, annotation=V1ImageOptions, direction="write" + ), + "pdf": convert_and_respect_annotation_metadata(object_=pdf, annotation=V1PdfConfig, direction="write"), + "advanced_options": convert_and_respect_annotation_metadata( + object_=advanced_options, annotation=V1AdvancedOptions, direction="write" + ), + "deidentify_token_format": deidentify_token_format, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectFileResponse, + parse_obj_as( + type_=V1DetectFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def detect_service_detect_status( + self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[V1DetectStatusResponse]: + """ + Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. + + Parameters + ---------- + id : str + ID of the deidentification request. + + vault_id : typing.Optional[str] + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DetectStatusResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/detect/status/{jsonable_encoder(id)}", + method="GET", + params={ + "vault_id": vault_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectStatusResponse, + parse_obj_as( + type_=V1DetectStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def detect_service_detect_text( + self, + *, + text: str, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + store_entities: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[V1DetectTextResponse]: + """ + Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. + + Parameters + ---------- + text : str + Data to deidentify. + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + advanced_options : typing.Optional[V1AdvancedOptions] + + store_entities : typing.Optional[bool] + Indicates whether entities should be stored in the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[V1DetectTextResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/text", + method="POST", + json={ + "text": text, + "vault_id": vault_id, + "session_id": session_id, + "restrict_entity_types": restrict_entity_types, + "deidentify_token_format": deidentify_token_format, + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "return_entities": return_entities, + "accuracy": accuracy, + "advanced_options": convert_and_respect_annotation_metadata( + object_=advanced_options, annotation=V1AdvancedOptions, direction="write" + ), + "store_entities": store_entities, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectTextResponse, + parse_obj_as( + type_=V1DetectTextResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawDeprecatedClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def detect_service_detect_file_input( + self, + *, + file: str, + data_format: V1FileDataFormat, + input_type: DetectFileRequestDataType, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + audio: typing.Optional[V1AudioConfig] = OMIT, + image: typing.Optional[V1ImageOptions] = OMIT, + pdf: typing.Optional[V1PdfConfig] = OMIT, + advanced_options: 
typing.Optional[V1AdvancedOptions] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1DetectFileResponse]: + """ + Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. + + Parameters + ---------- + file : str + Path of the file or base64-encoded data that has to be processed. + + data_format : V1FileDataFormat + + input_type : DetectFileRequestDataType + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + audio : typing.Optional[V1AudioConfig] + + image : typing.Optional[V1ImageOptions] + + pdf : typing.Optional[V1PdfConfig] + + advanced_options : typing.Optional[V1AdvancedOptions] + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DetectFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/file", + method="POST", + json={ + "file": file, + "data_format": data_format, + "input_type": input_type, + "vault_id": vault_id, + "session_id": session_id, + "restrict_entity_types": restrict_entity_types, + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "return_entities": return_entities, + "accuracy": accuracy, + "audio": convert_and_respect_annotation_metadata( + object_=audio, annotation=V1AudioConfig, direction="write" + ), + "image": convert_and_respect_annotation_metadata( + object_=image, annotation=V1ImageOptions, direction="write" + ), + "pdf": convert_and_respect_annotation_metadata(object_=pdf, annotation=V1PdfConfig, direction="write"), + "advanced_options": convert_and_respect_annotation_metadata( + object_=advanced_options, annotation=V1AdvancedOptions, direction="write" + ), + "deidentify_token_format": deidentify_token_format, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectFileResponse, + parse_obj_as( + type_=V1DetectFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def detect_service_detect_status( + self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[V1DetectStatusResponse]: + """ + Note: This operation is deprecated. 
Use Get Detect Run.

          Returns the status of a file deidentification request. + + Parameters + ---------- + id : str + ID of the deidentification request. + + vault_id : typing.Optional[str] + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DetectStatusResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/detect/status/{jsonable_encoder(id)}", + method="GET", + params={ + "vault_id": vault_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectStatusResponse, + parse_obj_as( + type_=V1DetectStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def detect_service_detect_text( + self, + *, + text: str, + vault_id: str, + session_id: typing.Optional[str] = OMIT, + restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, + return_entities: typing.Optional[bool] = OMIT, + accuracy: typing.Optional[DetectDataAccuracy] = OMIT, + advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, + store_entities: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[V1DetectTextResponse]: + """ + Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. + + Parameters + ---------- + text : str + Data to deidentify. + + vault_id : str + ID of the vault. + + session_id : typing.Optional[str] + Will give a handle to delete the tokens generated during a specific interaction. + + restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] + Entities to detect and deidentify. + + deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to ignore when detecting entities. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + + return_entities : typing.Optional[bool] + If `true`, returns the details for the detected entities. + + accuracy : typing.Optional[DetectDataAccuracy] + + advanced_options : typing.Optional[V1AdvancedOptions] + + store_entities : typing.Optional[bool] + Indicates whether entities should be stored in the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[V1DetectTextResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/text", + method="POST", + json={ + "text": text, + "vault_id": vault_id, + "session_id": session_id, + "restrict_entity_types": restrict_entity_types, + "deidentify_token_format": deidentify_token_format, + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "return_entities": return_entities, + "accuracy": accuracy, + "advanced_options": convert_and_respect_annotation_metadata( + object_=advanced_options, annotation=V1AdvancedOptions, direction="write" + ), + "store_entities": store_entities, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + V1DetectTextResponse, + parse_obj_as( + type_=V1DetectTextResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/errors/__init__.py b/skyflow/generated/rest/errors/__init__.py index fdf6196c..27c36553 100644 --- a/skyflow/generated/rest/errors/__init__.py +++ b/skyflow/generated/rest/errors/__init__.py @@ -3,7 +3,8 @@ # isort: skip_file from .bad_request_error import BadRequestError +from .internal_server_error import InternalServerError from .not_found_error import NotFoundError from .unauthorized_error import UnauthorizedError -__all__ = ["BadRequestError", "NotFoundError", "UnauthorizedError"] +__all__ = ["BadRequestError", "InternalServerError", "NotFoundError", "UnauthorizedError"] diff --git a/skyflow/generated/rest/errors/bad_request_error.py b/skyflow/generated/rest/errors/bad_request_error.py index c5d0db48..baf5be4f 100644 --- 
a/skyflow/generated/rest/errors/bad_request_error.py +++ b/skyflow/generated/rest/errors/bad_request_error.py @@ -6,9 +6,5 @@ class BadRequestError(ApiError): - def __init__( - self, - body: typing.Dict[str, typing.Optional[typing.Any]], - headers: typing.Optional[typing.Dict[str, str]] = None, - ): + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=400, headers=headers, body=body) diff --git a/skyflow/generated/rest/errors/internal_server_error.py b/skyflow/generated/rest/errors/internal_server_error.py new file mode 100644 index 00000000..d7a796c6 --- /dev/null +++ b/skyflow/generated/rest/errors/internal_server_error.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from ..core.api_error import ApiError +from ..types.error_response import ErrorResponse + + +class InternalServerError(ApiError): + def __init__(self, body: ErrorResponse, headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=500, headers=headers, body=body) diff --git a/skyflow/generated/rest/errors/not_found_error.py b/skyflow/generated/rest/errors/not_found_error.py index 66307415..dcd60e38 100644 --- a/skyflow/generated/rest/errors/not_found_error.py +++ b/skyflow/generated/rest/errors/not_found_error.py @@ -6,9 +6,5 @@ class NotFoundError(ApiError): - def __init__( - self, - body: typing.Dict[str, typing.Optional[typing.Any]], - headers: typing.Optional[typing.Dict[str, str]] = None, - ): + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=404, headers=headers, body=body) diff --git a/skyflow/generated/rest/errors/unauthorized_error.py b/skyflow/generated/rest/errors/unauthorized_error.py index 3d58c2e6..c83b25c2 100644 --- a/skyflow/generated/rest/errors/unauthorized_error.py +++ b/skyflow/generated/rest/errors/unauthorized_error.py @@ -6,9 +6,5 @@ class UnauthorizedError(ApiError): - def __init__( - self, - body: typing.Dict[str, typing.Optional[typing.Any]], - headers: typing.Optional[typing.Dict[str, str]] = None, - ): + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=401, headers=headers, body=body) diff --git a/skyflow/generated/rest/files/__init__.py b/skyflow/generated/rest/files/__init__.py new file mode 100644 index 00000000..1b10a15a --- /dev/null +++ b/skyflow/generated/rest/files/__init__.py @@ -0,0 +1,45 @@ +# This file was auto-generated by Fern from our API Definition. 
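The reworked error classes above all derive from the generated ApiError, so callers can branch on status-specific exceptions; the new InternalServerError carries a parsed ErrorResponse body. A minimal sketch of the intended handling, assuming the classes are re-exported from skyflow.generated.rest.errors as in the __init__ above (the request values are placeholders):

from skyflow import Skyflow
from skyflow.generated.rest.errors import (
    BadRequestError,
    InternalServerError,
    NotFoundError,
    UnauthorizedError,
)

client = Skyflow(
    token="YOUR_TOKEN",
)

try:
    client.deprecated.detect_service_detect_status(
        id="ID",
    )
except NotFoundError:
    ...  # unknown deidentification request ID (HTTP 404)
except (BadRequestError, UnauthorizedError):
    ...  # malformed request (400) or missing/expired token (401)
except InternalServerError:
    ...  # server-side failure (500); the body is a parsed ErrorResponse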
+ +# isort: skip_file + +from .types import ( + DeidentifyAudioRequestFile, + DeidentifyAudioRequestFileDataFormat, + DeidentifyAudioRequestOutputTranscription, + DeidentifyDocumentRequestFile, + DeidentifyDocumentRequestFileDataFormat, + DeidentifyFileRequestFile, + DeidentifyFileRequestFileDataFormat, + DeidentifyImageRequestFile, + DeidentifyImageRequestFileDataFormat, + DeidentifyImageRequestMaskingMethod, + DeidentifyPdfRequestFile, + DeidentifyPresentationRequestFile, + DeidentifyPresentationRequestFileDataFormat, + DeidentifySpreadsheetRequestFile, + DeidentifySpreadsheetRequestFileDataFormat, + DeidentifyStructuredTextRequestFile, + DeidentifyStructuredTextRequestFileDataFormat, + DeidentifyTextRequestFile, +) + +__all__ = [ + "DeidentifyAudioRequestFile", + "DeidentifyAudioRequestFileDataFormat", + "DeidentifyAudioRequestOutputTranscription", + "DeidentifyDocumentRequestFile", + "DeidentifyDocumentRequestFileDataFormat", + "DeidentifyFileRequestFile", + "DeidentifyFileRequestFileDataFormat", + "DeidentifyImageRequestFile", + "DeidentifyImageRequestFileDataFormat", + "DeidentifyImageRequestMaskingMethod", + "DeidentifyPdfRequestFile", + "DeidentifyPresentationRequestFile", + "DeidentifyPresentationRequestFileDataFormat", + "DeidentifySpreadsheetRequestFile", + "DeidentifySpreadsheetRequestFileDataFormat", + "DeidentifyStructuredTextRequestFile", + "DeidentifyStructuredTextRequestFileDataFormat", + "DeidentifyTextRequestFile", +] diff --git a/skyflow/generated/rest/files/client.py b/skyflow/generated/rest/files/client.py new file mode 100644 index 00000000..913ccd59 --- /dev/null +++ b/skyflow/generated/rest/files/client.py @@ -0,0 +1,1547 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.allow_regex import AllowRegex +from ..types.deidentify_file_response import DeidentifyFileResponse +from ..types.deidentify_status_response import DeidentifyStatusResponse +from ..types.entity_types import EntityTypes +from ..types.resource_id import ResourceId +from ..types.restrict_regex import RestrictRegex +from ..types.token_type_without_vault import TokenTypeWithoutVault +from ..types.transformations import Transformations +from ..types.uuid_ import Uuid +from ..types.vault_id import VaultId +from .raw_client import AsyncRawFilesClient, RawFilesClient +from .types.deidentify_audio_request_file import DeidentifyAudioRequestFile +from .types.deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription +from .types.deidentify_document_request_file import DeidentifyDocumentRequestFile +from .types.deidentify_file_request_file import DeidentifyFileRequestFile +from .types.deidentify_image_request_file import DeidentifyImageRequestFile +from .types.deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod +from .types.deidentify_pdf_request_file import DeidentifyPdfRequestFile +from .types.deidentify_presentation_request_file import DeidentifyPresentationRequestFile +from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile +from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile +from .types.deidentify_text_request_file import DeidentifyTextRequestFile + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
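Like the other generated modules, this client uses the Ellipsis-based OMIT sentinel so an argument the caller never passed can be told apart from an explicit None. A minimal, self-contained illustration of that pattern; the build_payload helper below is hypothetical and not part of the SDK:

import typing

OMIT = typing.cast(typing.Any, ...)  # sentinel default, distinct from None


def build_payload(density: typing.Optional[int] = OMIT) -> dict:
    # Hypothetical helper: drop omitted parameters from the JSON payload,
    # but keep an explicit None so it serializes as null.
    payload: dict = {"vault_id": "f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b"}
    if density is not OMIT:
        payload["density"] = density
    return payload


print(build_payload())              # vault_id only; density omitted
print(build_payload(density=None))  # density explicitly set to None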
+ + +class FilesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._raw_client = RawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawFilesClient + """ + return self._raw_client + + def deidentify_file( + self, + *, + vault_id: VaultId, + file: DeidentifyFileRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a file. This operation includes options applicable to all supported file types.

For more specific options, see the category-specific operations (like De-identify Document) and the file type-specific operations (like De-identify PDF). + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyFileRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyFileRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_file( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyFileRequestFile( + base_64="Zm9vYmFy", + data_format="txt", + ), + ) + """ + _response = self._raw_client.deidentify_file( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_document( + self, + *, + vault_id: VaultId, + file: DeidentifyDocumentRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

For more specific options, see the file type-specific operations (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyDocumentRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyDocumentRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_document( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyDocumentRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="docx", + ), + ) + """ + _response = self._raw_client.deidentify_document( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_pdf( + self, + *, + vault_id: VaultId, + file: DeidentifyPdfRequestFile, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPdfRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + density : typing.Optional[int] + Pixel density at which to process the PDF file. + + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyPdfRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_pdf( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyPdfRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + ), + ) + """ + _response = self._raw_client.deidentify_pdf( + vault_id=vault_id, + file=file, + density=density, + max_resolution=max_resolution, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_image( + self, + *, + vault_id: VaultId, + file: DeidentifyImageRequestFile, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyImageRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes OCR text output in the response. + + masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyImageRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_image( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyImageRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="jpg", + ), + ) + """ + _response = self._raw_client.deidentify_image( + vault_id=vault_id, + file=file, + output_processed_image=output_processed_image, + output_ocr_text=output_ocr_text, + masking_method=masking_method, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_text( + self, + *, + vault_id: VaultId, + file: DeidentifyTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyTextRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_text( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyTextRequestFile( + base_64="Zm9vYmFy", + ), + ) + """ + _response = self._raw_client.deidentify_text( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_structured_text( + self, + *, + vault_id: VaultId, + file: DeidentifyStructuredTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyStructuredTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyStructuredTextRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_structured_text( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyStructuredTextRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="json", + ), + ) + """ + _response = self._raw_client.deidentify_structured_text( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_spreadsheet( + self, + *, + vault_id: VaultId, + file: DeidentifySpreadsheetRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a spreadsheet file. This operation includes options applicable to all supported spreadsheet file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifySpreadsheetRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifySpreadsheetRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_spreadsheet( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifySpreadsheetRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="csv", + ), + ) + """ + _response = self._raw_client.deidentify_spreadsheet( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_presentation( + self, + *, + vault_id: VaultId, + file: DeidentifyPresentationRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPresentationRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyPresentationRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_presentation( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyPresentationRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="pptx", + ), + ) + """ + _response = self._raw_client.deidentify_presentation( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def deidentify_audio( + self, + *, + vault_id: VaultId, + file: DeidentifyAudioRequestFile, + output_processed_audio: typing.Optional[bool] = OMIT, + output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, + bleep_gain: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[float] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyAudioRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_audio : typing.Optional[bool] + If `true`, includes processed audio file in the response. + + output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] + Type of transcription to output. + + bleep_gain : typing.Optional[float] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + bleep_frequency : typing.Optional[float] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + from skyflow.files import DeidentifyAudioRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.deidentify_audio( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyAudioRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="mp3", + ), + ) + """ + _response = self._raw_client.deidentify_audio( + vault_id=vault_id, + file=file, + output_processed_audio=output_processed_audio, + output_transcription=output_transcription, + bleep_gain=bleep_gain, + bleep_frequency=bleep_frequency, + bleep_start_padding=bleep_start_padding, + bleep_stop_padding=bleep_stop_padding, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def get_run( + self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None + ) -> DeidentifyStatusResponse: + """ + Returns the status of the detect run. + + Parameters + ---------- + run_id : Uuid + ID of the detect run. + + vault_id : ResourceId + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyStatusResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.get_run( + run_id="run_id", + vault_id="vault_id", + ) + """ + _response = self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) + return _response.data + + +class AsyncFilesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._raw_client = AsyncRawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawFilesClient + """ + return self._raw_client + + async def deidentify_file( + self, + *, + vault_id: VaultId, + file: DeidentifyFileRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a file. This operation includes options applicable to all supported file types.

For more specific options, see the category-specific operations (like De-identify Document) and the file type-specific operations (like De-identify PDF). + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyFileRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyFileRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_file( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyFileRequestFile( + base_64="Zm9vYmFy", + data_format="txt", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_file( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_document( + self, + *, + vault_id: VaultId, + file: DeidentifyDocumentRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific operations (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyDocumentRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyDocumentRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_document( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyDocumentRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="docx", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_document( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_pdf( + self, + *, + vault_id: VaultId, + file: DeidentifyPdfRequestFile, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPdfRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + density : typing.Optional[int] + Pixel density at which to process the PDF file. + + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyPdfRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_pdf( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyPdfRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_pdf( + vault_id=vault_id, + file=file, + density=density, + max_resolution=max_resolution, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_image( + self, + *, + vault_id: VaultId, + file: DeidentifyImageRequestFile, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyImageRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes OCR text output in the response. + + masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyImageRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_image( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyImageRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="jpg", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_image( + vault_id=vault_id, + file=file, + output_processed_image=output_processed_image, + output_ocr_text=output_ocr_text, + masking_method=masking_method, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_text( + self, + *, + vault_id: VaultId, + file: DeidentifyTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyTextRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_text( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyTextRequestFile( + base_64="Zm9vYmFy", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_text( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_structured_text( + self, + *, + vault_id: VaultId, + file: DeidentifyStructuredTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyStructuredTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyStructuredTextRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_structured_text( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyStructuredTextRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="json", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_structured_text( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_spreadsheet( + self, + *, + vault_id: VaultId, + file: DeidentifySpreadsheetRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a spreadsheet file. This operation includes options applicable to all supported spreadsheet file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifySpreadsheetRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifySpreadsheetRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_spreadsheet( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifySpreadsheetRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="csv", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_spreadsheet( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_presentation( + self, + *, + vault_id: VaultId, + file: DeidentifyPresentationRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPresentationRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyPresentationRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_presentation( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyPresentationRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="pptx", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_presentation( + vault_id=vault_id, + file=file, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def deidentify_audio( + self, + *, + vault_id: VaultId, + file: DeidentifyAudioRequestFile, + output_processed_audio: typing.Optional[bool] = OMIT, + output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, + bleep_gain: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[float] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyFileResponse: + """ + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyAudioRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_audio : typing.Optional[bool] + If `true`, includes processed audio file in the response. + + output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] + Type of transcription to output. + + bleep_gain : typing.Optional[float] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + bleep_frequency : typing.Optional[float] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyFileResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import DeidentifyAudioRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.deidentify_audio( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=DeidentifyAudioRequestFile( + base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + data_format="mp3", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_audio( + vault_id=vault_id, + file=file, + output_processed_audio=output_processed_audio, + output_transcription=output_transcription, + bleep_gain=bleep_gain, + bleep_frequency=bleep_frequency, + bleep_start_padding=bleep_start_padding, + bleep_stop_padding=bleep_stop_padding, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def get_run( + self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None + ) -> DeidentifyStatusResponse: + """ + Returns the status of the detect run. + + Parameters + ---------- + run_id : Uuid + ID of the detect run. + + vault_id : ResourceId + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyStatusResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.get_run( + run_id="run_id", + vault_id="vault_id", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) + return _response.data diff --git a/skyflow/generated/rest/files/raw_client.py b/skyflow/generated/rest/files/raw_client.py new file mode 100644 index 00000000..b3193544 --- /dev/null +++ b/skyflow/generated/rest/files/raw_client.py @@ -0,0 +1,2355 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError +from ..errors.not_found_error import NotFoundError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.allow_regex import AllowRegex +from ..types.deidentify_file_response import DeidentifyFileResponse +from ..types.deidentify_status_response import DeidentifyStatusResponse +from ..types.entity_types import EntityTypes +from ..types.error_response import ErrorResponse +from ..types.resource_id import ResourceId +from ..types.restrict_regex import RestrictRegex +from ..types.token_type_without_vault import TokenTypeWithoutVault +from ..types.transformations import Transformations +from ..types.uuid_ import Uuid +from ..types.vault_id import VaultId +from .types.deidentify_audio_request_file import DeidentifyAudioRequestFile +from .types.deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription +from .types.deidentify_document_request_file import DeidentifyDocumentRequestFile +from .types.deidentify_file_request_file import DeidentifyFileRequestFile +from .types.deidentify_image_request_file import DeidentifyImageRequestFile +from .types.deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod +from .types.deidentify_pdf_request_file import DeidentifyPdfRequestFile +from .types.deidentify_presentation_request_file import DeidentifyPresentationRequestFile +from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile +from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile +from .types.deidentify_text_request_file import DeidentifyTextRequestFile + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawFilesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def deidentify_file( + self, + *, + vault_id: VaultId, + file: DeidentifyFileRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a file. This operation includes options applicable to all supported file types.

          For more specific options, see the category-specific operations (like De-identify Document) and the file type-specific opertions (like De-identify PDF). + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyFileRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyFileRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_document( + self, + *, + vault_id: VaultId, + file: DeidentifyDocumentRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyDocumentRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/document", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_pdf( + self, + *, + vault_id: VaultId, + file: DeidentifyPdfRequestFile, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPdfRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + density : typing.Optional[int] + Pixel density at which to process the PDF file. + + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/document/pdf", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyPdfRequestFile, direction="write" + ), + "density": density, + "max_resolution": max_resolution, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_image( + self, + *, + vault_id: VaultId, + file: DeidentifyImageRequestFile, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyImageRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes OCR text output in the response. + + masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/image", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyImageRequestFile, direction="write" + ), + "output_processed_image": output_processed_image, + "output_ocr_text": output_ocr_text, + "masking_method": masking_method, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_text( + self, + *, + vault_id: VaultId, + file: DeidentifyTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/text", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyTextRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_structured_text( + self, + *, + vault_id: VaultId, + file: DeidentifyStructuredTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyStructuredTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/structured_text", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_spreadsheet( + self, + *, + vault_id: VaultId, + file: DeidentifySpreadsheetRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a spreadsheet file. This operation includes options applicable to all supported spreadsheet file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifySpreadsheetRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/spreadsheet", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_presentation( + self, + *, + vault_id: VaultId, + file: DeidentifyPresentationRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPresentationRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/presentation", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def deidentify_audio( + self, + *, + vault_id: VaultId, + file: DeidentifyAudioRequestFile, + output_processed_audio: typing.Optional[bool] = OMIT, + output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, + bleep_gain: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[float] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyFileResponse]: + """ + 
De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyAudioRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_audio : typing.Optional[bool] + If `true`, includes processed audio file in the response. + + output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] + Type of transcription to output. + + bleep_gain : typing.Optional[float] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + bleep_frequency : typing.Optional[float] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/audio", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyAudioRequestFile, direction="write" + ), + "output_processed_audio": output_processed_audio, + "output_transcription": output_transcription, + "bleep_gain": bleep_gain, + "bleep_frequency": bleep_frequency, + "bleep_start_padding": bleep_start_padding, + "bleep_stop_padding": bleep_stop_padding, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + 
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get_run( + self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[DeidentifyStatusResponse]: + """ + Returns the status of the detect run. + + Parameters + ---------- + run_id : Uuid + ID of the detect run. + + vault_id : ResourceId + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyStatusResponse] + A successful response. + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/detect/runs/{jsonable_encoder(run_id)}", + method="GET", + params={ + "vault_id": vault_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyStatusResponse, + parse_obj_as( + type_=DeidentifyStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawFilesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def deidentify_file( + self, + *, + vault_id: VaultId, + file: DeidentifyFileRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a file. This operation includes options applicable to all supported file types.

          For more specific options, see the category-specific operations (like De-identify Document) and the file type-specific opertions (like De-identify PDF). + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyFileRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyFileRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_document( + self, + *, + vault_id: VaultId, + file: DeidentifyDocumentRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyDocumentRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/document", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_pdf( + self, + *, + vault_id: VaultId, + file: DeidentifyPdfRequestFile, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPdfRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + density : typing.Optional[int] + Pixel density at which to process the PDF file. + + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/document/pdf", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyPdfRequestFile, direction="write" + ), + "density": density, + "max_resolution": max_resolution, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_image( + self, + *, + vault_id: VaultId, + file: DeidentifyImageRequestFile, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.
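          For illustration (behaviour inferred from the option names, not from product documentation): `masking_method="blur"` with `output_processed_image=True` would return a blurred copy of the image, while `masking_method="blackbox"` would cover detected entities with solid boxes.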

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyImageRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes OCR text output in the response. + + masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/image", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyImageRequestFile, direction="write" + ), + "output_processed_image": output_processed_image, + "output_ocr_text": output_ocr_text, + "masking_method": masking_method, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_text( + self, + *, + vault_id: VaultId, + file: DeidentifyTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.
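          Plain-text input uses the fixed `data_format="txt"` of `DeidentifyTextRequestFile`, so an assumed sketch of the file argument is `file=DeidentifyTextRequestFile(base_64=encoded_text)`.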

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/text", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyTextRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_structured_text( + self, + *, + vault_id: VaultId, + file: DeidentifyStructuredTextRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.
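          Structured text covers JSON and XML content (see `DeidentifyStructuredTextRequestFileDataFormat`); an assumed sketch of the file argument is `file=DeidentifyStructuredTextRequestFile(base_64=encoded_json, data_format="json")`.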

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyStructuredTextRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/structured_text", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_spreadsheet( + self, + *, + vault_id: VaultId, + file: DeidentifySpreadsheetRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a spreadsheet file. This operation includes options applicable to all supported spreadsheet file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifySpreadsheetRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/spreadsheet", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_presentation( + self, + *, + vault_id: VaultId, + file: DeidentifyPresentationRequestFile, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyPresentationRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/presentation", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" + ), + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def deidentify_audio( + self, + *, + vault_id: VaultId, + file: DeidentifyAudioRequestFile, + output_processed_audio: typing.Optional[bool] = OMIT, + output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, + bleep_gain: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[float] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
AsyncHttpResponse[DeidentifyFileResponse]: + """ + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.
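          For example (an assumed illustration based on the parameter descriptions below): `bleep_gain=-10` makes the bleep quieter than the surrounding audio, and `output_transcription="diarized_transcription"` additionally returns a speaker-separated transcript.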

          For broader file type support, see De-identify File. + + Parameters + ---------- + vault_id : VaultId + + file : DeidentifyAudioRequestFile + File to de-identify. Files are specified as Base64-encoded data. + + output_processed_audio : typing.Optional[bool] + If `true`, includes processed audio file in the response. + + output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] + Type of transcription to output. + + bleep_gain : typing.Optional[float] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + bleep_frequency : typing.Optional[float] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenTypeWithoutVault] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyFileResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/file/audio", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=DeidentifyAudioRequestFile, direction="write" + ), + "output_processed_audio": output_processed_audio, + "output_transcription": output_transcription, + "bleep_gain": bleep_gain, + "bleep_frequency": bleep_frequency, + "bleep_start_padding": bleep_start_padding, + "bleep_stop_padding": bleep_stop_padding, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyFileResponse, + parse_obj_as( + type_=DeidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get_run( + self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DeidentifyStatusResponse]: + """ + Returns the status of the detect run. + + Parameters + ---------- + run_id : Uuid + ID of the detect run. + + vault_id : ResourceId + ID of the vault. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyStatusResponse] + A successful response. + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/detect/runs/{jsonable_encoder(run_id)}", + method="GET", + params={ + "vault_id": vault_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyStatusResponse, + parse_obj_as( + type_=DeidentifyStatusResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/files/types/__init__.py b/skyflow/generated/rest/files/types/__init__.py new file mode 100644 index 00000000..e6343d60 --- /dev/null +++ b/skyflow/generated/rest/files/types/__init__.py @@ -0,0 +1,43 @@ +# This file was auto-generated by Fern from our API Definition. 
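+
+# The types re-exported below are the request-side `file` wrappers consumed by the files client.
+# Each pairs Base64-encoded content (`base_64`, serialized as `base64`) with a `data_format`
+# discriminator; single-format wrappers (PDF and plain text) pin `data_format` to a literal
+# default, while the others accept a union of supported formats. A minimal construction sketch:
+#     DeidentifyPdfRequestFile(base_64=encoded_pdf, data_format="pdf")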
+ +# isort: skip_file + +from .deidentify_audio_request_file import DeidentifyAudioRequestFile +from .deidentify_audio_request_file_data_format import DeidentifyAudioRequestFileDataFormat +from .deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription +from .deidentify_document_request_file import DeidentifyDocumentRequestFile +from .deidentify_document_request_file_data_format import DeidentifyDocumentRequestFileDataFormat +from .deidentify_file_request_file import DeidentifyFileRequestFile +from .deidentify_file_request_file_data_format import DeidentifyFileRequestFileDataFormat +from .deidentify_image_request_file import DeidentifyImageRequestFile +from .deidentify_image_request_file_data_format import DeidentifyImageRequestFileDataFormat +from .deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod +from .deidentify_pdf_request_file import DeidentifyPdfRequestFile +from .deidentify_presentation_request_file import DeidentifyPresentationRequestFile +from .deidentify_presentation_request_file_data_format import DeidentifyPresentationRequestFileDataFormat +from .deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile +from .deidentify_spreadsheet_request_file_data_format import DeidentifySpreadsheetRequestFileDataFormat +from .deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile +from .deidentify_structured_text_request_file_data_format import DeidentifyStructuredTextRequestFileDataFormat +from .deidentify_text_request_file import DeidentifyTextRequestFile + +__all__ = [ + "DeidentifyAudioRequestFile", + "DeidentifyAudioRequestFileDataFormat", + "DeidentifyAudioRequestOutputTranscription", + "DeidentifyDocumentRequestFile", + "DeidentifyDocumentRequestFileDataFormat", + "DeidentifyFileRequestFile", + "DeidentifyFileRequestFileDataFormat", + "DeidentifyImageRequestFile", + "DeidentifyImageRequestFileDataFormat", + "DeidentifyImageRequestMaskingMethod", + "DeidentifyPdfRequestFile", + "DeidentifyPresentationRequestFile", + "DeidentifyPresentationRequestFileDataFormat", + "DeidentifySpreadsheetRequestFile", + "DeidentifySpreadsheetRequestFileDataFormat", + "DeidentifyStructuredTextRequestFile", + "DeidentifyStructuredTextRequestFileDataFormat", + "DeidentifyTextRequestFile", +] diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_file.py b/skyflow/generated/rest/files/types/deidentify_audio_request_file.py new file mode 100644 index 00000000..3ea4c16f --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_audio_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_audio_request_file_data_format import DeidentifyAudioRequestFileDataFormat + + +class DeidentifyAudioRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifyAudioRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py new file mode 100644 index 00000000..85f60bbb --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyAudioRequestFileDataFormat = typing.Union[typing.Literal["mp3", "wav"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py b/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py new file mode 100644 index 00000000..4588b1d1 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py @@ -0,0 +1,14 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyAudioRequestOutputTranscription = typing.Union[ + typing.Literal[ + "diarized_transcription", + "medical_diarized_transcription", + "medical_transcription", + "plaintext_transcription", + "transcription", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_document_request_file.py b/skyflow/generated/rest/files/types/deidentify_document_request_file.py new file mode 100644 index 00000000..cbf36c59 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_document_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_document_request_file_data_format import DeidentifyDocumentRequestFileDataFormat + + +class DeidentifyDocumentRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifyDocumentRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py new file mode 100644 index 00000000..a20f4fd8 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyDocumentRequestFileDataFormat = typing.Union[typing.Literal["doc", "docx", "pdf"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_file.py b/skyflow/generated/rest/files/types/deidentify_file_request_file.py new file mode 100644 index 00000000..3e062bb2 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_file_request_file_data_format import DeidentifyFileRequestFileDataFormat + + +class DeidentifyFileRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifyFileRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py new file mode 100644 index 00000000..20581451 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileRequestFileDataFormat = typing.Union[ + typing.Literal[ + "bmp", + "csv", + "doc", + "docx", + "jpeg", + "jpg", + "json", + "mp3", + "pdf", + "png", + "ppt", + "pptx", + "tif", + "tiff", + "txt", + "wav", + "xls", + "xlsx", + "xml", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_file.py b/skyflow/generated/rest/files/types/deidentify_image_request_file.py new file mode 100644 index 00000000..36677964 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_image_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_image_request_file_data_format import DeidentifyImageRequestFileDataFormat + + +class DeidentifyImageRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifyImageRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py new file mode 100644 index 00000000..a2ca8f2a --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyImageRequestFileDataFormat = typing.Union[ + typing.Literal["bmp", "jpeg", "jpg", "png", "tif", "tiff"], typing.Any +] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py new file mode 100644 index 00000000..bc0c338c --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackbox", "blur"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_pdf_request_file.py b/skyflow/generated/rest/files/types/deidentify_pdf_request_file.py new file mode 100644 index 00000000..da461fd1 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_pdf_request_file.py @@ -0,0 +1,33 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata + + +class DeidentifyPdfRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: typing.Literal["pdf"] = pydantic.Field(default="pdf") + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py b/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py new file mode 100644 index 00000000..c618ccc1 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_presentation_request_file_data_format import DeidentifyPresentationRequestFileDataFormat + + +class DeidentifyPresentationRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. 
+ """ + + data_format: DeidentifyPresentationRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py new file mode 100644 index 00000000..d09f42f8 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyPresentationRequestFileDataFormat = typing.Union[typing.Literal["ppt", "pptx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py b/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py new file mode 100644 index 00000000..f97e1c03 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_spreadsheet_request_file_data_format import DeidentifySpreadsheetRequestFileDataFormat + + +class DeidentifySpreadsheetRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifySpreadsheetRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py new file mode 100644 index 00000000..20db2856 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifySpreadsheetRequestFileDataFormat = typing.Union[typing.Literal["csv", "xls", "xlsx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py new file mode 100644 index 00000000..aa2d0834 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .deidentify_structured_text_request_file_data_format import DeidentifyStructuredTextRequestFileDataFormat + + +class DeidentifyStructuredTextRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: DeidentifyStructuredTextRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py new file mode 100644 index 00000000..f956fe48 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyStructuredTextRequestFileDataFormat = typing.Union[typing.Literal["json", "xml"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_text_request_file.py b/skyflow/generated/rest/files/types/deidentify_text_request_file.py new file mode 100644 index 00000000..193aa7bd --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_text_request_file.py @@ -0,0 +1,33 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata + + +class DeidentifyTextRequestFile(UniversalBaseModel): + """ + File to de-identify. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to de-identify. + """ + + data_format: typing.Literal["txt"] = pydantic.Field(default="txt") + """ + Data format of the file. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/query/client.py b/skyflow/generated/rest/query/client.py index 1f5edd75..28fd454e 100644 --- a/skyflow/generated/rest/query/client.py +++ b/skyflow/generated/rest/query/client.py @@ -55,8 +55,14 @@ def query_service_execute_query( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.query.query_service_execute_query(vault_id='vaultID', query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.query.query_service_execute_query( + vault_id="vaultID", + query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', + ) """ _response = self._raw_client.query_service_execute_query(vault_id, query=query, request_options=request_options) return _response.data @@ -105,11 +111,22 @@ async def query_service_execute_query( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.query.query_service_execute_query(vault_id='vaultID', query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', ) + await client.query.query_service_execute_query( + vault_id="vaultID", + query='select * from opportunities where id="01010000ade21cded569d43944544ec6"', + ) + + asyncio.run(main()) """ _response = await self._raw_client.query_service_execute_query( diff --git a/skyflow/generated/rest/query/raw_client.py b/skyflow/generated/rest/query/raw_client.py index 897d1e2d..5450e1bf 100644 --- a/skyflow/generated/rest/query/raw_client.py +++ b/skyflow/generated/rest/query/raw_client.py @@ -72,9 +72,9 @@ def query_service_execute_query( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -141,9 +141,9 @@ async def query_service_execute_query( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/records/client.py b/skyflow/generated/rest/records/client.py index 643e2826..1f727bfc 100644 --- a/skyflow/generated/rest/records/client.py +++ b/skyflow/generated/rest/records/client.py @@ -75,14 +75,40 @@ def record_service_batch_operation( Examples -------- - from skyflow import Skyflow - from skyflow import V1BatchRecord - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_batch_operation(vault_id='vaultID', records=[V1BatchRecord(fields={'drivers_license_number': '89867453' - , 'name': 'Connor' - , 'phone_number': '8794523160' - , 'ssn': '143-89-2306' - }, table_name='persons', method="POST", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, download_url=False, 
upsert='drivers_license_number', ), V1BatchRecord(table_name='persons', method="GET", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, id='f1dbc55c-7c9b-495d-9a36-72bb2b619202', download_url=True, )], ) + from skyflow import Skyflow, V1BatchRecord + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_batch_operation( + vault_id="vaultID", + records=[ + V1BatchRecord( + fields={ + "drivers_license_number": "89867453", + "name": "Connor", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + table_name="persons", + method="POST", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + download_url=False, + upsert="drivers_license_number", + ), + V1BatchRecord( + table_name="persons", + method="GET", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", + download_url=True, + ), + ], + ) """ _response = self._raw_client.record_service_batch_operation( vault_id, records=records, continue_on_error=continue_on_error, byot=byot, request_options=request_options @@ -158,8 +184,14 @@ def record_service_bulk_get_record( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_bulk_get_record(vault_id='vaultID', object_name='objectName', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_bulk_get_record( + vault_id="vaultID", + object_name="objectName", + ) """ _response = self._raw_client.record_service_bulk_get_record( vault_id, @@ -225,18 +257,36 @@ def record_service_insert_record( Examples -------- - from skyflow import Skyflow - from skyflow import V1FieldRecords - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_insert_record(vault_id='vaultID', object_name='objectName', records=[V1FieldRecords(fields={'drivers_license_number': '13456789' - , 'name': 'John' - , 'phone_number': '1236784563' - , 'ssn': '123-45-6789' - }, ), V1FieldRecords(fields={'drivers_license_number': '98765432' - , 'name': 'James' - , 'phone_number': '9876543215' - , 'ssn': '345-45-9876' - }, )], tokenization=True, upsert='drivers_license_number', homogeneous=False, ) + from skyflow import Skyflow, V1FieldRecords + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_insert_record( + vault_id="vaultID", + object_name="objectName", + records=[ + V1FieldRecords( + fields={ + "drivers_license_number": "13456789", + "name": "John", + "phone_number": "1236784563", + "ssn": "123-45-6789", + }, + ), + V1FieldRecords( + fields={ + "drivers_license_number": "98765432", + "name": "James", + "phone_number": "9876543215", + "ssn": "345-45-9876", + }, + ), + ], + tokenization=True, + upsert="drivers_license_number", + homogeneous=False, + ) """ _response = self._raw_client.record_service_insert_record( vault_id, @@ -283,8 +333,18 @@ def record_service_bulk_delete_record( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_bulk_delete_record(vault_id='vaultID', object_name='objectName', skyflow_ids=['51782ea4-91a5-4430-a06d-f4b76efd3d2f', '110ce08f-6059-4874-b1ae-7c6651d286ff'], ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_bulk_delete_record( + vault_id="vaultID", + object_name="objectName", + skyflow_ids=[ + "51782ea4-91a5-4430-a06d-f4b76efd3d2f", + "110ce08f-6059-4874-b1ae-7c6651d286ff", + ], + ) """ _response = 
self._raw_client.record_service_bulk_delete_record( vault_id, object_name, skyflow_ids=skyflow_ids, request_options=request_options @@ -340,8 +400,15 @@ def record_service_get_record( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_get_record(vault_id='vaultID', object_name='objectName', id='ID', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_get_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) """ _response = self._raw_client.record_service_get_record( vault_id, @@ -397,14 +464,25 @@ def record_service_update_record( Examples -------- - from skyflow import Skyflow - from skyflow import V1FieldRecords - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_update_record(vault_id='vaultID', object_name='objectName', id='ID', record=V1FieldRecords(fields={'drivers_license_number': '89867453' - , 'name': 'Steve Smith' - , 'phone_number': '8794523160' - , 'ssn': '143-89-2306' - }, ), tokenization=True, ) + from skyflow import Skyflow, V1FieldRecords + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_update_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + record=V1FieldRecords( + fields={ + "drivers_license_number": "89867453", + "name": "Steve Smith", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + ), + tokenization=True, + ) """ _response = self._raw_client.record_service_update_record( vault_id, @@ -445,8 +523,15 @@ def record_service_delete_record( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.record_service_delete_record(vault_id='vaultID', object_name='objectName', id='ID', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.record_service_delete_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) """ _response = self._raw_client.record_service_delete_record( vault_id, object_name, id, request_options=request_options @@ -459,7 +544,8 @@ def file_service_upload_file( object_name: str, id: str, *, - file_column_name: typing.Optional[core.File] = OMIT, + file: typing.Optional[core.File] = OMIT, + column_name: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> V1UpdateRecordResponse: """ @@ -476,9 +562,12 @@ def file_service_upload_file( id : str `skyflow_id` of the record. - file_column_name : typing.Optional[core.File] + file : typing.Optional[core.File] See core.File for more documentation + column_name : typing.Optional[str] + Name of the column to store the file in. The column must have a file data type. + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
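# The generated example for file_service_upload_file (next hunk) still calls the method without the
# new parameters, so the following is a hedged sketch of an upload that passes them. It assumes a
# plain binary file object is an acceptable core.File value and uses placeholder vault, table, record,
# and column names; it is an illustration, not generated output.
from skyflow import Skyflow

client = Skyflow(
    token="YOUR_TOKEN",
)
with open("drivers_license.pdf", "rb") as file_handle:  # hypothetical local file
    client.records.file_service_upload_file(
        vault_id="vaultID",
        object_name="objectName",
        id="ID",
        file=file_handle,            # core.File payload (assumed: file-like objects are accepted)
        column_name="file_column",   # hypothetical column with a file data type
    )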
@@ -490,11 +579,18 @@ def file_service_upload_file( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.file_service_upload_file(vault_id='vaultID', object_name='objectName', id='ID', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_upload_file( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) """ _response = self._raw_client.file_service_upload_file( - vault_id, object_name, id, file_column_name=file_column_name, request_options=request_options + vault_id, object_name, id, file=file, column_name=column_name, request_options=request_options ) return _response.data @@ -535,8 +631,16 @@ def file_service_delete_file( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.file_service_delete_file(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_delete_file( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) """ _response = self._raw_client.file_service_delete_file( vault_id, table_name, id, column_name, request_options=request_options @@ -580,8 +684,16 @@ def file_service_get_file_scan_status( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.records.file_service_get_file_scan_status(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.file_service_get_file_scan_status( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) """ _response = self._raw_client.file_service_get_file_scan_status( vault_id, table_name, id, column_name, request_options=request_options @@ -639,16 +751,47 @@ async def record_service_batch_operation( Examples -------- - from skyflow import AsyncSkyflow - from skyflow import V1BatchRecord import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow, V1BatchRecord + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_batch_operation(vault_id='vaultID', records=[V1BatchRecord(fields={'drivers_license_number': '89867453' - , 'name': 'Connor' - , 'phone_number': '8794523160' - , 'ssn': '143-89-2306' - }, table_name='persons', method="POST", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, download_url=False, upsert='drivers_license_number', ), V1BatchRecord(table_name='persons', method="GET", batch_id='persons-12345', redaction="PLAIN_TEXT", tokenization=False, id='f1dbc55c-7c9b-495d-9a36-72bb2b619202', download_url=True, )], ) + await client.records.record_service_batch_operation( + vault_id="vaultID", + records=[ + V1BatchRecord( + fields={ + "drivers_license_number": "89867453", + "name": "Connor", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + table_name="persons", + method="POST", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + download_url=False, + upsert="drivers_license_number", + ), + V1BatchRecord( + table_name="persons", + method="GET", + batch_id="persons-12345", + redaction="PLAIN_TEXT", + tokenization=False, + id="f1dbc55c-7c9b-495d-9a36-72bb2b619202", + download_url=True, + ), + ], + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_batch_operation( @@ -724,11 +867,22 @@ async def 
record_service_bulk_get_record( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_bulk_get_record(vault_id='vaultID', object_name='objectName', ) + await client.records.record_service_bulk_get_record( + vault_id="vaultID", + object_name="objectName", + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_bulk_get_record( @@ -795,20 +949,43 @@ async def record_service_insert_record( Examples -------- - from skyflow import AsyncSkyflow - from skyflow import V1FieldRecords import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow, V1FieldRecords + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_insert_record(vault_id='vaultID', object_name='objectName', records=[V1FieldRecords(fields={'drivers_license_number': '13456789' - , 'name': 'John' - , 'phone_number': '1236784563' - , 'ssn': '123-45-6789' - }, ), V1FieldRecords(fields={'drivers_license_number': '98765432' - , 'name': 'James' - , 'phone_number': '9876543215' - , 'ssn': '345-45-9876' - }, )], tokenization=True, upsert='drivers_license_number', homogeneous=False, ) + await client.records.record_service_insert_record( + vault_id="vaultID", + object_name="objectName", + records=[ + V1FieldRecords( + fields={ + "drivers_license_number": "13456789", + "name": "John", + "phone_number": "1236784563", + "ssn": "123-45-6789", + }, + ), + V1FieldRecords( + fields={ + "drivers_license_number": "98765432", + "name": "James", + "phone_number": "9876543215", + "ssn": "345-45-9876", + }, + ), + ], + tokenization=True, + upsert="drivers_license_number", + homogeneous=False, + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_insert_record( @@ -855,11 +1032,26 @@ async def record_service_bulk_delete_record( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_bulk_delete_record(vault_id='vaultID', object_name='objectName', skyflow_ids=['51782ea4-91a5-4430-a06d-f4b76efd3d2f', '110ce08f-6059-4874-b1ae-7c6651d286ff'], ) + await client.records.record_service_bulk_delete_record( + vault_id="vaultID", + object_name="objectName", + skyflow_ids=[ + "51782ea4-91a5-4430-a06d-f4b76efd3d2f", + "110ce08f-6059-4874-b1ae-7c6651d286ff", + ], + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_bulk_delete_record( @@ -915,11 +1107,23 @@ async def record_service_get_record( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_get_record(vault_id='vaultID', object_name='objectName', id='ID', ) + await client.records.record_service_get_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_get_record( @@ -976,16 +1180,32 @@ async def record_service_update_record( Examples -------- - from skyflow import AsyncSkyflow - from skyflow import 
V1FieldRecords import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow, V1FieldRecords + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_update_record(vault_id='vaultID', object_name='objectName', id='ID', record=V1FieldRecords(fields={'drivers_license_number': '89867453' - , 'name': 'Steve Smith' - , 'phone_number': '8794523160' - , 'ssn': '143-89-2306' - }, ), tokenization=True, ) + await client.records.record_service_update_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + record=V1FieldRecords( + fields={ + "drivers_license_number": "89867453", + "name": "Steve Smith", + "phone_number": "8794523160", + "ssn": "143-89-2306", + }, + ), + tokenization=True, + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_update_record( @@ -1026,11 +1246,23 @@ async def record_service_delete_record( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.record_service_delete_record(vault_id='vaultID', object_name='objectName', id='ID', ) + await client.records.record_service_delete_record( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_delete_record( @@ -1044,7 +1276,8 @@ async def file_service_upload_file( object_name: str, id: str, *, - file_column_name: typing.Optional[core.File] = OMIT, + file: typing.Optional[core.File] = OMIT, + column_name: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> V1UpdateRecordResponse: """ @@ -1061,9 +1294,12 @@ async def file_service_upload_file( id : str `skyflow_id` of the record. - file_column_name : typing.Optional[core.File] + file : typing.Optional[core.File] See core.File for more documentation + column_name : typing.Optional[str] + Name of the column to store the file in. The column must have a file data type. + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -1074,15 +1310,27 @@ async def file_service_upload_file( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.file_service_upload_file(vault_id='vaultID', object_name='objectName', id='ID', ) + await client.records.file_service_upload_file( + vault_id="vaultID", + object_name="objectName", + id="ID", + ) + + asyncio.run(main()) """ _response = await self._raw_client.file_service_upload_file( - vault_id, object_name, id, file_column_name=file_column_name, request_options=request_options + vault_id, object_name, id, file=file, column_name=column_name, request_options=request_options ) return _response.data @@ -1122,11 +1370,24 @@ async def file_service_delete_file( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.file_service_delete_file(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) + await client.records.file_service_delete_file( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + + asyncio.run(main()) """ _response = await self._raw_client.file_service_delete_file( @@ -1170,11 +1431,24 @@ async def file_service_get_file_scan_status( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.records.file_service_get_file_scan_status(vault_id='vaultID', table_name='tableName', id='ID', column_name='columnName', ) + await client.records.file_service_get_file_scan_status( + vault_id="vaultID", + table_name="tableName", + id="ID", + column_name="columnName", + ) + + asyncio.run(main()) """ _response = await self._raw_client.file_service_get_file_scan_status( diff --git a/skyflow/generated/rest/records/raw_client.py b/skyflow/generated/rest/records/raw_client.py index 55b874c6..e2bfdc92 100644 --- a/skyflow/generated/rest/records/raw_client.py +++ b/skyflow/generated/rest/records/raw_client.py @@ -98,9 +98,9 @@ def record_service_batch_operation( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -207,9 +207,9 @@ def record_service_bulk_get_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -296,9 +296,9 @@ def record_service_insert_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -364,9 +364,9 @@ def 
record_service_bulk_delete_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -447,9 +447,9 @@ def record_service_get_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -529,9 +529,9 @@ def record_service_update_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -585,9 +585,9 @@ def record_service_delete_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -603,7 +603,8 @@ def file_service_upload_file( object_name: str, id: str, *, - file_column_name: typing.Optional[core.File] = OMIT, + file: typing.Optional[core.File] = OMIT, + column_name: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[V1UpdateRecordResponse]: """ @@ -620,9 +621,12 @@ def file_service_upload_file( id : str `skyflow_id` of the record. - file_column_name : typing.Optional[core.File] + file : typing.Optional[core.File] See core.File for more documentation + column_name : typing.Optional[str] + Name of the column to store the file in. The column must have a file data type. + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
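# The raw-client hunk that follows rebuilds the upload request as an explicit multipart form: a
# "columnName" form field plus a "file" part, posted with force_multipart=True to
# v1/vaults/{vault_id}/{object_name}/{id}/files. A rough httpx equivalent of that request, with a
# placeholder vault URL, token, and values (a sketch for orientation, not generated output):
import httpx

vault_url = "https://identifier.vault.skyflowapis.com"  # hypothetical vault URL
response = httpx.post(
    f"{vault_url}/v1/vaults/vaultID/objectName/ID/files",
    headers={"Authorization": "Bearer YOUR_TOKEN"},
    data={"columnName": "file_column"},                   # form field naming the target column
    files={"file": ("document.pdf", b"file contents")},   # multipart file part
)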
@@ -634,12 +638,15 @@ def file_service_upload_file( _response = self._client_wrapper.httpx_client.request( f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", method="POST", - data={}, + data={ + "columnName": column_name, + }, files={ - **({"fileColumnName": file_column_name} if fileColumnName is not None else {}), + **({"file": file} if file is not None else {}), }, request_options=request_options, omit=OMIT, + force_multipart=True, ) try: if 200 <= _response.status_code < 300: @@ -655,9 +662,9 @@ def file_service_upload_file( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -720,9 +727,9 @@ def file_service_delete_file( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -785,9 +792,9 @@ def file_service_get_file_scan_status( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -865,9 +872,9 @@ async def record_service_batch_operation( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -974,9 +981,9 @@ async def record_service_bulk_get_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1063,9 +1070,9 @@ async def record_service_insert_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1131,9 +1138,9 @@ async def record_service_bulk_delete_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1214,9 +1221,9 @@ async def record_service_get_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore 
object_=_response.json(), ), ), @@ -1296,9 +1303,9 @@ async def record_service_update_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1352,9 +1359,9 @@ async def record_service_delete_record( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1370,7 +1377,8 @@ async def file_service_upload_file( object_name: str, id: str, *, - file_column_name: typing.Optional[core.File] = OMIT, + file: typing.Optional[core.File] = OMIT, + column_name: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[V1UpdateRecordResponse]: """ @@ -1387,9 +1395,12 @@ async def file_service_upload_file( id : str `skyflow_id` of the record. - file_column_name : typing.Optional[core.File] + file : typing.Optional[core.File] See core.File for more documentation + column_name : typing.Optional[str] + Name of the column to store the file in. The column must have a file data type. + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -1401,12 +1412,15 @@ async def file_service_upload_file( _response = await self._client_wrapper.httpx_client.request( f"v1/vaults/{jsonable_encoder(vault_id)}/{jsonable_encoder(object_name)}/{jsonable_encoder(id)}/files", method="POST", - data={}, + data={ + "columnName": column_name, + }, files={ - **({"fileColumnName": file_column_name} if fileColumnName is not None else {}), + **({"file": file} if file is not None else {}), }, request_options=request_options, omit=OMIT, + force_multipart=True, ) try: if 200 <= _response.status_code < 300: @@ -1422,9 +1436,9 @@ async def file_service_upload_file( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1487,9 +1501,9 @@ async def file_service_delete_file( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -1552,9 +1566,9 @@ async def file_service_get_file_scan_status( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/strings/__init__.py b/skyflow/generated/rest/strings/__init__.py new file mode 100644 index 00000000..4cabb7fb --- /dev/null +++ b/skyflow/generated/rest/strings/__init__.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +# isort: skip_file + +from .types import ReidentifyStringRequestFormat + +__all__ = ["ReidentifyStringRequestFormat"] diff --git a/skyflow/generated/rest/strings/client.py b/skyflow/generated/rest/strings/client.py new file mode 100644 index 00000000..5c71662d --- /dev/null +++ b/skyflow/generated/rest/strings/client.py @@ -0,0 +1,289 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.allow_regex import AllowRegex +from ..types.deidentify_string_response import DeidentifyStringResponse +from ..types.entity_types import EntityTypes +from ..types.reidentify_string_response import ReidentifyStringResponse +from ..types.restrict_regex import RestrictRegex +from ..types.token_type import TokenType +from ..types.transformations import Transformations +from ..types.vault_id import VaultId +from .raw_client import AsyncRawStringsClient, RawStringsClient +from .types.reidentify_string_request_format import ReidentifyStringRequestFormat + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class StringsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._raw_client = RawStringsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawStringsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawStringsClient + """ + return self._raw_client + + def deidentify_string( + self, + *, + vault_id: VaultId, + text: str, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenType] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyStringResponse: + """ + De-identifies sensitive data from a string. + + Parameters + ---------- + vault_id : VaultId + + text : str + String to de-identify. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenType] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyStringResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.strings.deidentify_string( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + text="My name is John Doe, and my email is johndoe@acme.com.", + ) + """ + _response = self._raw_client.deidentify_string( + vault_id=vault_id, + text=text, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + def reidentify_string( + self, + *, + text: str, + vault_id: str, + format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyStringResponse: + """ + Re-identifies tokens in a string. + + Parameters + ---------- + text : str + String to re-identify. + + vault_id : str + ID of the vault where the entities are stored. 
+ + format : typing.Optional[ReidentifyStringRequestFormat] + Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ReidentifyStringResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.strings.reidentify_string( + text="My name is [NAME_1], and my email is [EMAIL_1].", + vault_id="1ad6db07-8405-46cf-9a1e-db148ff9f4c5", + ) + """ + _response = self._raw_client.reidentify_string( + text=text, vault_id=vault_id, format=format, request_options=request_options + ) + return _response.data + + +class AsyncStringsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._raw_client = AsyncRawStringsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawStringsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawStringsClient + """ + return self._raw_client + + async def deidentify_string( + self, + *, + vault_id: VaultId, + text: str, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenType] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> DeidentifyStringResponse: + """ + De-identifies sensitive data from a string. + + Parameters + ---------- + vault_id : VaultId + + text : str + String to de-identify. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenType] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeidentifyStringResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.strings.deidentify_string( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + text="My name is John Doe, and my email is johndoe@acme.com.", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.deidentify_string( + vault_id=vault_id, + text=text, + entity_types=entity_types, + token_type=token_type, + allow_regex=allow_regex, + restrict_regex=restrict_regex, + transformations=transformations, + request_options=request_options, + ) + return _response.data + + async def reidentify_string( + self, + *, + text: str, + vault_id: str, + format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyStringResponse: + """ + Re-identifies tokens in a string. + + Parameters + ---------- + text : str + String to re-identify. + + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[ReidentifyStringRequestFormat] + Mapping of perferred data formatting options to entity types. 
Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ReidentifyStringResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.strings.reidentify_string( + text="My name is [NAME_1], and my email is [EMAIL_1].", + vault_id="1ad6db07-8405-46cf-9a1e-db148ff9f4c5", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.reidentify_string( + text=text, vault_id=vault_id, format=format, request_options=request_options + ) + return _response.data diff --git a/skyflow/generated/rest/strings/raw_client.py b/skyflow/generated/rest/strings/raw_client.py new file mode 100644 index 00000000..ad67433a --- /dev/null +++ b/skyflow/generated/rest/strings/raw_client.py @@ -0,0 +1,445 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.allow_regex import AllowRegex +from ..types.deidentify_string_response import DeidentifyStringResponse +from ..types.entity_types import EntityTypes +from ..types.error_response import ErrorResponse +from ..types.reidentify_string_response import ReidentifyStringResponse +from ..types.restrict_regex import RestrictRegex +from ..types.token_type import TokenType +from ..types.transformations import Transformations +from ..types.vault_id import VaultId +from .types.reidentify_string_request_format import ReidentifyStringRequestFormat + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawStringsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def deidentify_string( + self, + *, + vault_id: VaultId, + text: str, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenType] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[DeidentifyStringResponse]: + """ + De-identifies sensitive data from a string. + + Parameters + ---------- + vault_id : VaultId + + text : str + String to de-identify. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenType] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[DeidentifyStringResponse] + A successful response. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/string", + method="POST", + json={ + "vault_id": vault_id, + "text": text, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenType, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyStringResponse, + parse_obj_as( + type_=DeidentifyStringResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def reidentify_string( + self, + *, + text: str, + vault_id: str, + format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ReidentifyStringResponse]: + """ + Re-identifies tokens in a string. + + Parameters + ---------- + text : str + String to re-identify. + + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[ReidentifyStringRequestFormat] + Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ReidentifyStringResponse] + A successful response. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/reidentify/string", + method="POST", + json={ + "text": text, + "vault_id": vault_id, + "format": convert_and_respect_annotation_metadata( + object_=format, annotation=ReidentifyStringRequestFormat, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ReidentifyStringResponse, + parse_obj_as( + type_=ReidentifyStringResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawStringsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def deidentify_string( + self, + *, + vault_id: VaultId, + text: str, + entity_types: typing.Optional[EntityTypes] = OMIT, + token_type: typing.Optional[TokenType] = OMIT, + allow_regex: typing.Optional[AllowRegex] = OMIT, + restrict_regex: typing.Optional[RestrictRegex] = OMIT, + transformations: typing.Optional[Transformations] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[DeidentifyStringResponse]: + """ + De-identifies sensitive data from a string. + + Parameters + ---------- + vault_id : VaultId + + text : str + String to de-identify. + + entity_types : typing.Optional[EntityTypes] + + token_type : typing.Optional[TokenType] + + allow_regex : typing.Optional[AllowRegex] + + restrict_regex : typing.Optional[RestrictRegex] + + transformations : typing.Optional[Transformations] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[DeidentifyStringResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/deidentify/string", + method="POST", + json={ + "vault_id": vault_id, + "text": text, + "entity_types": entity_types, + "token_type": convert_and_respect_annotation_metadata( + object_=token_type, annotation=TokenType, direction="write" + ), + "allow_regex": allow_regex, + "restrict_regex": restrict_regex, + "transformations": convert_and_respect_annotation_metadata( + object_=transformations, annotation=Transformations, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DeidentifyStringResponse, + parse_obj_as( + type_=DeidentifyStringResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def reidentify_string( + self, + *, + text: str, + vault_id: str, + format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ReidentifyStringResponse]: + """ + Re-identifies tokens in a string. + + Parameters + ---------- + text : str + String to re-identify. + + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[ReidentifyStringRequestFormat] + Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ReidentifyStringResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/reidentify/string", + method="POST", + json={ + "text": text, + "vault_id": vault_id, + "format": convert_and_respect_annotation_metadata( + object_=format, annotation=ReidentifyStringRequestFormat, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ReidentifyStringResponse, + parse_obj_as( + type_=ReidentifyStringResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/strings/types/__init__.py b/skyflow/generated/rest/strings/types/__init__.py new file mode 100644 index 00000000..97d06583 --- /dev/null +++ b/skyflow/generated/rest/strings/types/__init__.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +# isort: skip_file + +from .reidentify_string_request_format import ReidentifyStringRequestFormat + +__all__ = ["ReidentifyStringRequestFormat"] diff --git a/skyflow/generated/rest/strings/types/reidentify_string_request_format.py b/skyflow/generated/rest/strings/types/reidentify_string_request_format.py new file mode 100644 index 00000000..bfda392c --- /dev/null +++ b/skyflow/generated/rest/strings/types/reidentify_string_request_format.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.entity_type import EntityType + + +class ReidentifyStringRequestFormat(UniversalBaseModel): + """ + Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + """ + + redacted: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to fully redact. + """ + + masked: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to mask. + """ + + plaintext: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to return in plaintext. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/tokens/client.py b/skyflow/generated/rest/tokens/client.py index d7861277..5518932c 100644 --- a/skyflow/generated/rest/tokens/client.py +++ b/skyflow/generated/rest/tokens/client.py @@ -65,10 +65,25 @@ def record_service_detokenize( Examples -------- - from skyflow import Skyflow - from skyflow import V1DetokenizeRecordRequest - client = Skyflow(token="YOUR_TOKEN", ) - client.tokens.record_service_detokenize(vault_id='vaultID', detokenization_parameters=[V1DetokenizeRecordRequest(token='afbd1074-51c1-4a16-9eee-e2c0ecb52125', redaction="PLAIN_TEXT", ), V1DetokenizeRecordRequest(token='05383487-fcae-42e5-a48e-5bd62a51af12', redaction="DEFAULT", )], download_url=False, ) + from skyflow import Skyflow, V1DetokenizeRecordRequest + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.tokens.record_service_detokenize( + vault_id="vaultID", + detokenization_parameters=[ + V1DetokenizeRecordRequest( + token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", + redaction="PLAIN_TEXT", + ), + V1DetokenizeRecordRequest( + token="05383487-fcae-42e5-a48e-5bd62a51af12", + redaction="DEFAULT", + ), + ], + download_url=False, + ) """ _response = self._raw_client.record_service_detokenize( vault_id, @@ -108,8 +123,13 @@ def record_service_tokenize( Examples -------- from skyflow import Skyflow - client = Skyflow(token="YOUR_TOKEN", ) - client.tokens.record_service_tokenize(vault_id='vaultID', ) + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.tokens.record_service_tokenize( + vault_id="vaultID", + ) """ _response = self._raw_client.record_service_tokenize( vault_id, tokenization_parameters=tokenization_parameters, request_options=request_options @@ -168,12 +188,32 @@ async def record_service_detokenize( Examples -------- - from skyflow import AsyncSkyflow - from skyflow import V1DetokenizeRecordRequest import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow, V1DetokenizeRecordRequest + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.tokens.record_service_detokenize(vault_id='vaultID', detokenization_parameters=[V1DetokenizeRecordRequest(token='afbd1074-51c1-4a16-9eee-e2c0ecb52125', redaction="PLAIN_TEXT", ), V1DetokenizeRecordRequest(token='05383487-fcae-42e5-a48e-5bd62a51af12', redaction="DEFAULT", )], download_url=False, ) + await client.tokens.record_service_detokenize( + vault_id="vaultID", + detokenization_parameters=[ + V1DetokenizeRecordRequest( + token="afbd1074-51c1-4a16-9eee-e2c0ecb52125", + redaction="PLAIN_TEXT", + ), + V1DetokenizeRecordRequest( + token="05383487-fcae-42e5-a48e-5bd62a51af12", + redaction="DEFAULT", + ), + ], + download_url=False, + ) + + asyncio.run(main()) """ _response = await self._raw_client.record_service_detokenize( @@ -213,11 +253,21 @@ async def record_service_tokenize( Examples -------- - from skyflow import AsyncSkyflow import asyncio - client = AsyncSkyflow(token="YOUR_TOKEN", ) + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + async def main() -> None: - await client.tokens.record_service_tokenize(vault_id='vaultID', ) + await client.tokens.record_service_tokenize( + vault_id="vaultID", + ) + + asyncio.run(main()) """ _response = await 
self._raw_client.record_service_tokenize( diff --git a/skyflow/generated/rest/tokens/raw_client.py b/skyflow/generated/rest/tokens/raw_client.py index 057b9f68..4741631d 100644 --- a/skyflow/generated/rest/tokens/raw_client.py +++ b/skyflow/generated/rest/tokens/raw_client.py @@ -90,9 +90,9 @@ def record_service_detokenize( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -158,9 +158,9 @@ def record_service_tokenize( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -241,9 +241,9 @@ async def record_service_detokenize( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), @@ -309,9 +309,9 @@ async def record_service_tokenize( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Dict[str, typing.Optional[typing.Any]], + typing.Optional[typing.Any], parse_obj_as( - type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore + type_=typing.Optional[typing.Any], # type: ignore object_=_response.json(), ), ), diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index 093756ee..74b8a5d1 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -2,6 +2,11 @@ # isort: skip_file +from .advanced_options_column_mapping import AdvancedOptionsColumnMapping +from .advanced_options_entity_column_map import AdvancedOptionsEntityColumnMap +from .advanced_options_vault_schema import AdvancedOptionsVaultSchema +from .allow_regex import AllowRegex +from .audio_config_transcription_type import AudioConfigTranscriptionType from .audit_event_audit_resource_type import AuditEventAuditResourceType from .audit_event_context import AuditEventContext from .audit_event_data import AuditEventData @@ -9,11 +14,45 @@ from .batch_record_method import BatchRecordMethod from .context_access_type import ContextAccessType from .context_auth_mode import ContextAuthMode +from .deidentify_file_output import DeidentifyFileOutput +from .deidentify_file_output_processed_file_type import DeidentifyFileOutputProcessedFileType +from .deidentify_file_response import DeidentifyFileResponse +from .deidentify_status_response import DeidentifyStatusResponse +from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType +from .deidentify_status_response_status import DeidentifyStatusResponseStatus +from .deidentify_status_response_word_character_count import DeidentifyStatusResponseWordCharacterCount +from .deidentify_string_response import DeidentifyStringResponse +from .detect_data_accuracy import DetectDataAccuracy +from .detect_data_entities import DetectDataEntities +from .detect_file_request_data_type import DetectFileRequestDataType +from .detect_request_deidentify_option import 
DetectRequestDeidentifyOption +from .detected_entity import DetectedEntity from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from .entity_location import EntityLocation +from .entity_type import EntityType +from .entity_types import EntityTypes +from .error_response import ErrorResponse +from .error_response_error import ErrorResponseError +from .error_string import ErrorString from .googlerpc_status import GooglerpcStatus +from .processed_file_output_processed_file_type import ProcessedFileOutputProcessedFileType from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction +from .reidentify_string_response import ReidentifyStringResponse from .request_action_type import RequestActionType +from .resource_id import ResourceId +from .restrict_regex import RestrictRegex +from .token_type import TokenType +from .token_type_default import TokenTypeDefault +from .token_type_without_vault import TokenTypeWithoutVault +from .token_type_without_vault_default import TokenTypeWithoutVaultDefault +from .transformations import Transformations +from .transformations_shift_dates import TransformationsShiftDates +from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem +from .uuid_ import Uuid +from .v_1_advanced_options import V1AdvancedOptions +from .v_1_audio_config import V1AudioConfig +from .v_1_audio_options import V1AudioOptions from .v_1_audit_after_options import V1AuditAfterOptions from .v_1_audit_event_response import V1AuditEventResponse from .v_1_audit_response import V1AuditResponse @@ -28,25 +67,43 @@ from .v_1_card import V1Card from .v_1_delete_file_response import V1DeleteFileResponse from .v_1_delete_record_response import V1DeleteRecordResponse +from .v_1_detect_file_response import V1DetectFileResponse +from .v_1_detect_status_response import V1DetectStatusResponse +from .v_1_detect_status_response_status import V1DetectStatusResponseStatus +from .v_1_detect_text_request import V1DetectTextRequest +from .v_1_detect_text_response import V1DetectTextResponse from .v_1_detokenize_record_request import V1DetokenizeRecordRequest from .v_1_detokenize_record_response import V1DetokenizeRecordResponse from .v_1_detokenize_response import V1DetokenizeResponse from .v_1_field_records import V1FieldRecords from .v_1_file_av_scan_status import V1FileAvScanStatus +from .v_1_file_data_format import V1FileDataFormat from .v_1_get_auth_token_response import V1GetAuthTokenResponse from .v_1_get_file_scan_status_response import V1GetFileScanStatusResponse from .v_1_get_query_response import V1GetQueryResponse +from .v_1_image_options import V1ImageOptions from .v_1_insert_record_response import V1InsertRecordResponse +from .v_1_locations import V1Locations from .v_1_member_type import V1MemberType +from .v_1_pdf_config import V1PdfConfig +from .v_1_pdf_options import V1PdfOptions +from .v_1_processed_file_output import V1ProcessedFileOutput from .v_1_record_meta_properties import V1RecordMetaProperties +from .v_1_response_entities import V1ResponseEntities from .v_1_tokenize_record_request import V1TokenizeRecordRequest from .v_1_tokenize_record_response import V1TokenizeRecordResponse from .v_1_tokenize_response import V1TokenizeResponse from .v_1_update_record_response import V1UpdateRecordResponse from .v_1_vault_field_mapping import V1VaultFieldMapping from .v_1_vault_schema_config import V1VaultSchemaConfig +from .vault_id import VaultId __all__ = [ + "AdvancedOptionsColumnMapping", 
+ "AdvancedOptionsEntityColumnMap", + "AdvancedOptionsVaultSchema", + "AllowRegex", + "AudioConfigTranscriptionType", "AuditEventAuditResourceType", "AuditEventContext", "AuditEventData", @@ -54,11 +111,45 @@ "BatchRecordMethod", "ContextAccessType", "ContextAuthMode", + "DeidentifyFileOutput", + "DeidentifyFileOutputProcessedFileType", + "DeidentifyFileResponse", + "DeidentifyStatusResponse", + "DeidentifyStatusResponseOutputType", + "DeidentifyStatusResponseStatus", + "DeidentifyStatusResponseWordCharacterCount", + "DeidentifyStringResponse", + "DetectDataAccuracy", + "DetectDataEntities", + "DetectFileRequestDataType", + "DetectRequestDeidentifyOption", + "DetectedEntity", "DetokenizeRecordResponseValueType", + "EntityLocation", + "EntityType", + "EntityTypes", + "ErrorResponse", + "ErrorResponseError", + "ErrorString", "GooglerpcStatus", + "ProcessedFileOutputProcessedFileType", "ProtobufAny", "RedactionEnumRedaction", + "ReidentifyStringResponse", "RequestActionType", + "ResourceId", + "RestrictRegex", + "TokenType", + "TokenTypeDefault", + "TokenTypeWithoutVault", + "TokenTypeWithoutVaultDefault", + "Transformations", + "TransformationsShiftDates", + "TransformationsShiftDatesEntityTypesItem", + "Uuid", + "V1AdvancedOptions", + "V1AudioConfig", + "V1AudioOptions", "V1AuditAfterOptions", "V1AuditEventResponse", "V1AuditResponse", @@ -73,21 +164,34 @@ "V1Card", "V1DeleteFileResponse", "V1DeleteRecordResponse", + "V1DetectFileResponse", + "V1DetectStatusResponse", + "V1DetectStatusResponseStatus", + "V1DetectTextRequest", + "V1DetectTextResponse", "V1DetokenizeRecordRequest", "V1DetokenizeRecordResponse", "V1DetokenizeResponse", "V1FieldRecords", "V1FileAvScanStatus", + "V1FileDataFormat", "V1GetAuthTokenResponse", "V1GetFileScanStatusResponse", "V1GetQueryResponse", + "V1ImageOptions", "V1InsertRecordResponse", + "V1Locations", "V1MemberType", + "V1PdfConfig", + "V1PdfOptions", + "V1ProcessedFileOutput", "V1RecordMetaProperties", + "V1ResponseEntities", "V1TokenizeRecordRequest", "V1TokenizeRecordResponse", "V1TokenizeResponse", "V1UpdateRecordResponse", "V1VaultFieldMapping", "V1VaultSchemaConfig", + "VaultId", ] diff --git a/skyflow/generated/rest/types/advanced_options_column_mapping.py b/skyflow/generated/rest/types/advanced_options_column_mapping.py new file mode 100644 index 00000000..8369b329 --- /dev/null +++ b/skyflow/generated/rest/types/advanced_options_column_mapping.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .advanced_options_entity_column_map import AdvancedOptionsEntityColumnMap + + +class AdvancedOptionsColumnMapping(UniversalBaseModel): + """ + Contains map of what has to be stored in which column. + """ + + session_id: str = pydantic.Field() + """ + Table name of the vault. + """ + + default: str = pydantic.Field() + """ + Name of column to store data in when no explicit mapping exists. + """ + + entity_column_map: typing.Optional[typing.List[AdvancedOptionsEntityColumnMap]] = pydantic.Field(default=None) + """ + Column mapping for different entities. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/advanced_options_entity_column_map.py b/skyflow/generated/rest/types/advanced_options_entity_column_map.py new file mode 100644 index 00000000..debf836a --- /dev/null +++ b/skyflow/generated/rest/types/advanced_options_entity_column_map.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .detect_data_entities import DetectDataEntities + + +class AdvancedOptionsEntityColumnMap(UniversalBaseModel): + """ + Contains map of what entity has to be stored in which column. + """ + + entity_type: typing.Optional[DetectDataEntities] = None + column_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Column name where the entity has to be stored. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/advanced_options_vault_schema.py b/skyflow/generated/rest/types/advanced_options_vault_schema.py new file mode 100644 index 00000000..8496eb97 --- /dev/null +++ b/skyflow/generated/rest/types/advanced_options_vault_schema.py @@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .advanced_options_column_mapping import AdvancedOptionsColumnMapping + + +class AdvancedOptionsVaultSchema(UniversalBaseModel): + """ + Contains table name and column mapping. + """ + + table_name: str = pydantic.Field() + """ + Table name of the vault. + """ + + mapping: AdvancedOptionsColumnMapping + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/allow_regex.py b/skyflow/generated/rest/types/allow_regex.py new file mode 100644 index 00000000..f4164375 --- /dev/null +++ b/skyflow/generated/rest/types/allow_regex.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +AllowRegex = typing.List[str] diff --git a/skyflow/generated/rest/types/audio_config_transcription_type.py b/skyflow/generated/rest/types/audio_config_transcription_type.py new file mode 100644 index 00000000..13ad88d9 --- /dev/null +++ b/skyflow/generated/rest/types/audio_config_transcription_type.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +AudioConfigTranscriptionType = typing.Union[ + typing.Literal[ + "none", + "skyflow_transcription", + "aws_transcription", + "aws_transcription_diarize", + "aws_medical_transcription", + "aws_medical_transcription_diarize", + "aws_transcription_diarize_json", + "deepgram_transcription_diarize", + "deepgram_transcription_json", + "deepgram_wrapper", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/deidentify_file_output.py b/skyflow/generated/rest/types/deidentify_file_output.py new file mode 100644 index 00000000..a4c2da4d --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_file_output.py @@ -0,0 +1,45 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .deidentify_file_output_processed_file_type import DeidentifyFileOutputProcessedFileType + + +class DeidentifyFileOutput(UniversalBaseModel): + """ + Details and contents of the processed file. + """ + + processed_file: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="processedFile")] = ( + pydantic.Field(default=None) + ) + """ + URL or base64-encoded data of the output. + """ + + processed_file_type: typing_extensions.Annotated[ + typing.Optional[DeidentifyFileOutputProcessedFileType], FieldMetadata(alias="processedFileType") + ] = pydantic.Field(default=None) + """ + Type of the processed file. + """ + + processed_file_extension: typing_extensions.Annotated[ + typing.Optional[str], FieldMetadata(alias="processedFileExtension") + ] = pydantic.Field(default=None) + """ + Extension of the processed file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py b/skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py new file mode 100644 index 00000000..332ce445 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileOutputProcessedFileType = typing.Union[ + typing.Literal[ + "entities", + "plaintext_transcription", + "redacted_audio", + "redacted_diarized_transcription", + "redacted_file", + "redacted_image", + "redacted_medical_diarized_transcription", + "redacted_medical_transcription", + "redacted_text", + "redacted_transcription", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/deidentify_file_response.py b/skyflow/generated/rest/types/deidentify_file_response.py new file mode 100644 index 00000000..e4e6bf35 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_file_response.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class DeidentifyFileResponse(UniversalBaseModel): + """ + Response to de-identify a file. + """ + + run_id: str = pydantic.Field() + """ + Status URL for the detect run. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentify_status_response.py b/skyflow/generated/rest/types/deidentify_status_response.py new file mode 100644 index 00000000..0ad91e62 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_status_response.py @@ -0,0 +1,76 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .deidentify_file_output import DeidentifyFileOutput +from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType +from .deidentify_status_response_status import DeidentifyStatusResponseStatus +from .deidentify_status_response_word_character_count import DeidentifyStatusResponseWordCharacterCount + + +class DeidentifyStatusResponse(UniversalBaseModel): + """ + Response to get the status of a detect run. + """ + + status: DeidentifyStatusResponseStatus = pydantic.Field() + """ + Status of the detect run. + """ + + output: typing.List[DeidentifyFileOutput] = pydantic.Field() + """ + How the input file was specified. + """ + + output_type: typing_extensions.Annotated[ + typing.Optional[DeidentifyStatusResponseOutputType], FieldMetadata(alias="outputType") + ] = pydantic.Field(default=None) + """ + How the output file is specified. + """ + + message: str = pydantic.Field() + """ + Status details about the detect run. + """ + + word_character_count: typing_extensions.Annotated[ + typing.Optional[DeidentifyStatusResponseWordCharacterCount], FieldMetadata(alias="wordCharacterCount") + ] = pydantic.Field(default=None) + """ + Word and character count in the processed text. + """ + + size: typing.Optional[float] = pydantic.Field(default=None) + """ + Size of the processed text in kilobytes (KB). + """ + + duration: typing.Optional[float] = pydantic.Field(default=None) + """ + Duration of the processed audio in seconds. + """ + + pages: typing.Optional[int] = pydantic.Field(default=None) + """ + Number of pages in the processed PDF. + """ + + slides: typing.Optional[int] = pydantic.Field(default=None) + """ + Number of slides in the processed presentation. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentify_status_response_output_type.py b/skyflow/generated/rest/types/deidentify_status_response_output_type.py new file mode 100644 index 00000000..571801c1 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_status_response_output_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyStatusResponseOutputType = typing.Union[typing.Literal["base64", "efs_path"], typing.Any] diff --git a/skyflow/generated/rest/types/deidentify_status_response_status.py b/skyflow/generated/rest/types/deidentify_status_response_status.py new file mode 100644 index 00000000..40262092 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_status_response_status.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyStatusResponseStatus = typing.Union[typing.Literal["failed", "in_progress", "success"], typing.Any] diff --git a/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py b/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py new file mode 100644 index 00000000..6584ca92 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata + + +class DeidentifyStatusResponseWordCharacterCount(UniversalBaseModel): + """ + Word and character count in the processed text. + """ + + word_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="wordCount")] = None + character_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="characterCount")] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentify_string_response.py b/skyflow/generated/rest/types/deidentify_string_response.py new file mode 100644 index 00000000..c141f841 --- /dev/null +++ b/skyflow/generated/rest/types/deidentify_string_response.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .detected_entity import DetectedEntity + + +class DeidentifyStringResponse(UniversalBaseModel): + """ + Response to deidentify a string. + """ + + processed_text: str = pydantic.Field() + """ + De-identified text. + """ + + entities: typing.List[DetectedEntity] = pydantic.Field() + """ + Detected entities. + """ + + word_count: int = pydantic.Field() + """ + Number of words from the input text. + """ + + character_count: int = pydantic.Field() + """ + Number of characters from the input text. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/detect_data_accuracy.py b/skyflow/generated/rest/types/detect_data_accuracy.py new file mode 100644 index 00000000..91e3619e --- /dev/null +++ b/skyflow/generated/rest/types/detect_data_accuracy.py @@ -0,0 +1,17 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DetectDataAccuracy = typing.Union[ + typing.Literal[ + "unknown", + "standard", + "standard_plus", + "standard_plus_multilingual", + "standard_plus_automatic", + "high", + "high_multilingual", + "high_automatic", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/detect_data_entities.py b/skyflow/generated/rest/types/detect_data_entities.py new file mode 100644 index 00000000..4ac0bd49 --- /dev/null +++ b/skyflow/generated/rest/types/detect_data_entities.py @@ -0,0 +1,72 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectDataEntities = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender_sexuality", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/detect_file_request_data_type.py b/skyflow/generated/rest/types/detect_file_request_data_type.py new file mode 100644 index 00000000..825d4778 --- /dev/null +++ b/skyflow/generated/rest/types/detect_file_request_data_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectFileRequestDataType = typing.Union[typing.Literal["UNKNOWN", "BASE64"], typing.Any] diff --git a/skyflow/generated/rest/types/detect_request_deidentify_option.py b/skyflow/generated/rest/types/detect_request_deidentify_option.py new file mode 100644 index 00000000..caee5f16 --- /dev/null +++ b/skyflow/generated/rest/types/detect_request_deidentify_option.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectRequestDeidentifyOption = typing.Union[typing.Literal["UNKNOWN", "ENTITY_UNQ_COUNTER", "ENTITY_ONLY"], typing.Any] diff --git a/skyflow/generated/rest/types/detected_entity.py b/skyflow/generated/rest/types/detected_entity.py new file mode 100644 index 00000000..c34ba2ca --- /dev/null +++ b/skyflow/generated/rest/types/detected_entity.py @@ -0,0 +1,43 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .entity_location import EntityLocation + + +class DetectedEntity(UniversalBaseModel): + """ + Detected entities. + """ + + token: typing.Optional[str] = pydantic.Field(default=None) + """ + Processed text of the entity. + """ + + value: typing.Optional[str] = pydantic.Field(default=None) + """ + Original text of the entity. 
+ """ + + location: typing.Optional[EntityLocation] = None + entity_type: typing.Optional[str] = pydantic.Field(default=None) + """ + Highest-rated label. + """ + + entity_scores: typing.Optional[typing.Dict[str, float]] = pydantic.Field(default=None) + """ + entity_scores and their scores. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/entity_location.py b/skyflow/generated/rest/types/entity_location.py new file mode 100644 index 00000000..487f9c72 --- /dev/null +++ b/skyflow/generated/rest/types/entity_location.py @@ -0,0 +1,41 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class EntityLocation(UniversalBaseModel): + """ + Locations of an entity in the text. + """ + + start_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the original text. + """ + + end_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the original text. + """ + + start_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the processed text. + """ + + end_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the processed text. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/entity_type.py b/skyflow/generated/rest/types/entity_type.py new file mode 100644 index 00000000..6b48f1d8 --- /dev/null +++ b/skyflow/generated/rest/types/entity_type.py @@ -0,0 +1,72 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +EntityType = typing.Union[ + typing.Literal[ + "account_number", + "age", + "all", + "bank_account", + "blood_type", + "condition", + "corporate_action", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "dose", + "driver_license", + "drug", + "duration", + "email_address", + "event", + "filename", + "financial_metric", + "gender_sexuality", + "healthcare_number", + "injury", + "ip_address", + "language", + "location", + "location_address", + "location_address_street", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "medical_code", + "medical_process", + "money", + "name", + "name_family", + "name_given", + "name_medical_professional", + "numerical_pii", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "phone_number", + "physical_attribute", + "political_affiliation", + "product", + "religion", + "routing_number", + "ssn", + "statistics", + "time", + "trend", + "url", + "username", + "vehicle_id", + "zodiac_sign", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/entity_types.py b/skyflow/generated/rest/types/entity_types.py new file mode 100644 index 00000000..3adb0438 --- /dev/null +++ b/skyflow/generated/rest/types/entity_types.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .entity_type import EntityType + +EntityTypes = typing.List[EntityType] diff --git a/skyflow/generated/rest/types/error_response.py b/skyflow/generated/rest/types/error_response.py new file mode 100644 index 00000000..7c0491bb --- /dev/null +++ b/skyflow/generated/rest/types/error_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .error_response_error import ErrorResponseError + + +class ErrorResponse(UniversalBaseModel): + error: ErrorResponseError + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/error_response_error.py b/skyflow/generated/rest/types/error_response_error.py new file mode 100644 index 00000000..722b69cc --- /dev/null +++ b/skyflow/generated/rest/types/error_response_error.py @@ -0,0 +1,35 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class ErrorResponseError(UniversalBaseModel): + grpc_code: int = pydantic.Field() + """ + gRPC status codes. See https://grpc.io/docs/guides/status-codes. + """ + + http_code: int = pydantic.Field() + """ + HTTP status codes. See https://developer.mozilla.org/en-US/docs/Web/HTTP/Status. + """ + + http_status: str = pydantic.Field() + """ + HTTP status message. 
+ """ + + message: str + details: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/error_string.py b/skyflow/generated/rest/types/error_string.py new file mode 100644 index 00000000..068b4a84 --- /dev/null +++ b/skyflow/generated/rest/types/error_string.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. + +ErrorString = str diff --git a/skyflow/generated/rest/types/processed_file_output_processed_file_type.py b/skyflow/generated/rest/types/processed_file_output_processed_file_type.py new file mode 100644 index 00000000..18758eaa --- /dev/null +++ b/skyflow/generated/rest/types/processed_file_output_processed_file_type.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ProcessedFileOutputProcessedFileType = typing.Union[ + typing.Literal[ + "none", + "redacted_audio", + "redacted_image", + "redacted_transcription", + "redacted_file", + "redacted_text", + "entities", + "redacted_aws_transcription_diarize_json", + "redacted_deepgram_transcription_diarize_json", + "plaintext_transcribed", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/reidentify_string_response.py b/skyflow/generated/rest/types/reidentify_string_response.py new file mode 100644 index 00000000..8284806b --- /dev/null +++ b/skyflow/generated/rest/types/reidentify_string_response.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class ReidentifyStringResponse(UniversalBaseModel): + """ + Re-identify string response. + """ + + processed_text: typing.Optional[str] = pydantic.Field(default=None) + """ + Re-identified text. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/resource_id.py b/skyflow/generated/rest/types/resource_id.py new file mode 100644 index 00000000..211b4108 --- /dev/null +++ b/skyflow/generated/rest/types/resource_id.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. + +ResourceId = str diff --git a/skyflow/generated/rest/types/restrict_regex.py b/skyflow/generated/rest/types/restrict_regex.py new file mode 100644 index 00000000..06dd46b7 --- /dev/null +++ b/skyflow/generated/rest/types/restrict_regex.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +RestrictRegex = typing.List[str] diff --git a/skyflow/generated/rest/types/token_type.py b/skyflow/generated/rest/types/token_type.py new file mode 100644 index 00000000..200b9630 --- /dev/null +++ b/skyflow/generated/rest/types/token_type.py @@ -0,0 +1,39 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .entity_type import EntityType +from .token_type_default import TokenTypeDefault + + +class TokenType(UniversalBaseModel): + """ + Mapping of tokens to generation for detected entities. Can't be specified together with `token_type`. + """ + + default: typing.Optional[TokenTypeDefault] = None + vault_token: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to replace with vault tokens. + """ + + entity_unq_counter: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to replace with entity tokens with unique counters. + """ + + entity_only: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to replace with entity tokens. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/token_type_default.py b/skyflow/generated/rest/types/token_type_default.py new file mode 100644 index 00000000..cfda9f4b --- /dev/null +++ b/skyflow/generated/rest/types/token_type_default.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +TokenTypeDefault = typing.Union[typing.Literal["entity_only", "entity_unq_counter", "vault_token"], typing.Any] diff --git a/skyflow/generated/rest/types/token_type_without_vault.py b/skyflow/generated/rest/types/token_type_without_vault.py new file mode 100644 index 00000000..d79a3477 --- /dev/null +++ b/skyflow/generated/rest/types/token_type_without_vault.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .entity_type import EntityType +from .token_type_without_vault_default import TokenTypeWithoutVaultDefault + + +class TokenTypeWithoutVault(UniversalBaseModel): + """ + Mapping of tokens to generation for detected entities. Can't be specified together with `token_type`. + """ + + default: typing.Optional[TokenTypeWithoutVaultDefault] = None + entity_unq_counter: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to replace with entity tokens with unique counters. + """ + + entity_only: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to replace with entity tokens. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/token_type_without_vault_default.py b/skyflow/generated/rest/types/token_type_without_vault_default.py new file mode 100644 index 00000000..53d71dc6 --- /dev/null +++ b/skyflow/generated/rest/types/token_type_without_vault_default.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +TokenTypeWithoutVaultDefault = typing.Union[typing.Literal["entity_only", "entity_unq_counter"], typing.Any] diff --git a/skyflow/generated/rest/types/transformations.py b/skyflow/generated/rest/types/transformations.py new file mode 100644 index 00000000..352df144 --- /dev/null +++ b/skyflow/generated/rest/types/transformations.py @@ -0,0 +1,27 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .transformations_shift_dates import TransformationsShiftDates + + +class Transformations(UniversalBaseModel): + """ + Transformations to apply to the detected entities. + """ + + shift_dates: typing.Optional[TransformationsShiftDates] = pydantic.Field(default=None) + """ + Shift dates by a specified number of days. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/transformations_shift_dates.py b/skyflow/generated/rest/types/transformations_shift_dates.py new file mode 100644 index 00000000..21b21af8 --- /dev/null +++ b/skyflow/generated/rest/types/transformations_shift_dates.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem + + +class TransformationsShiftDates(UniversalBaseModel): + """ + Shift dates by a specified number of days. + """ + + max_days: typing.Optional[int] = pydantic.Field(default=None) + """ + Maximum number of days to shift the date by. + """ + + min_days: typing.Optional[int] = pydantic.Field(default=None) + """ + Minimum number of days to shift the date by. + """ + + entity_types: typing.Optional[typing.List[TransformationsShiftDatesEntityTypesItem]] = pydantic.Field(default=None) + """ + Entity types to shift dates for. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py b/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py new file mode 100644 index 00000000..f8d98df6 --- /dev/null +++ b/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +TransformationsShiftDatesEntityTypesItem = typing.Union[typing.Literal["date", "date_interval", "dob"], typing.Any] diff --git a/skyflow/generated/rest/types/uuid_.py b/skyflow/generated/rest/types/uuid_.py new file mode 100644 index 00000000..cf319b43 --- /dev/null +++ b/skyflow/generated/rest/types/uuid_.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +Uuid = str diff --git a/skyflow/generated/rest/types/v_1_advanced_options.py b/skyflow/generated/rest/types/v_1_advanced_options.py new file mode 100644 index 00000000..ea893551 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_advanced_options.py @@ -0,0 +1,38 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .advanced_options_vault_schema import AdvancedOptionsVaultSchema + + +class V1AdvancedOptions(UniversalBaseModel): + """ + Advanced options for post processing. + """ + + date_shift: typing.Optional[int] = pydantic.Field(default=None) + """ + No. of days by which original date has to be shifted to. + """ + + custom_client: typing.Optional[bool] = pydantic.Field(default=None) + """ + Custom client specific logic. + """ + + schema_: typing_extensions.Annotated[typing.Optional[AdvancedOptionsVaultSchema], FieldMetadata(alias="schema")] = ( + None + ) + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audio_config.py b/skyflow/generated/rest/types/v_1_audio_config.py new file mode 100644 index 00000000..dc866e47 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audio_config.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .audio_config_transcription_type import AudioConfigTranscriptionType +from .v_1_audio_options import V1AudioOptions + + +class V1AudioConfig(UniversalBaseModel): + """ + How to handle audio files. + """ + + output_transcription: typing.Optional[AudioConfigTranscriptionType] = None + output_processed_audio: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, includes processed audio file in the response. + """ + + options: typing.Optional[V1AudioOptions] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audio_options.py b/skyflow/generated/rest/types/v_1_audio_options.py new file mode 100644 index 00000000..6e5b3df9 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_audio_options.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class V1AudioOptions(UniversalBaseModel): + """ + Options for audio files. + """ + + bleep_start_padding: typing.Optional[float] = pydantic.Field(default=None) + """ + Padding added to the beginning of a bleep, in seconds. + """ + + bleep_end_padding: typing.Optional[float] = pydantic.Field(default=None) + """ + Padding added to the end of a bleep, in seconds. + """ + + distortion_steps: typing.Optional[int] = pydantic.Field(default=None) + """ + Specifies how the distortion will be made. Providing a number more than 0 will result in a higher tone and a coefficient less than 0 will result in a lower tone. 
+ """ + + bleep_frequency: typing.Optional[int] = pydantic.Field(default=None) + """ + This parameter configures the frequency of the sine wave used for the bleep sound in an audio segment. + """ + + bleep_gain: typing.Optional[int] = pydantic.Field(default=None) + """ + It controls the relative loudness of the bleep,positive values increase its loudness, and negative values decrease it. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_file_response.py b/skyflow/generated/rest/types/v_1_detect_file_response.py new file mode 100644 index 00000000..f933703e --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detect_file_response.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class V1DetectFileResponse(UniversalBaseModel): + """ + Response to deidentify a file. + """ + + status_url: typing.Optional[str] = pydantic.Field(default=None) + """ + Status URL for the deidentification request. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_status_response.py b/skyflow/generated/rest/types/v_1_detect_status_response.py new file mode 100644 index 00000000..ac859394 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detect_status_response.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_detect_status_response_status import V1DetectStatusResponseStatus +from .v_1_processed_file_output import V1ProcessedFileOutput + + +class V1DetectStatusResponse(UniversalBaseModel): + """ + Response to get the status of a file deidentification request. + """ + + status: typing.Optional[V1DetectStatusResponseStatus] = None + output: typing.Optional[typing.List[V1ProcessedFileOutput]] = pydantic.Field(default=None) + """ + How the input file was specified. + """ + + message: typing.Optional[str] = pydantic.Field(default=None) + """ + Status details about the deidentification request. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_status_response_status.py b/skyflow/generated/rest/types/v_1_detect_status_response_status.py new file mode 100644 index 00000000..1b9531cb --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detect_status_response_status.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +V1DetectStatusResponseStatus = typing.Union[typing.Literal["UNKNOWN", "FAILED", "SUCCESS", "IN_PROGRESS"], typing.Any] diff --git a/skyflow/generated/rest/types/v_1_detect_text_request.py b/skyflow/generated/rest/types/v_1_detect_text_request.py new file mode 100644 index 00000000..f832ef7b --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detect_text_request.py @@ -0,0 +1,68 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .detect_data_accuracy import DetectDataAccuracy +from .detect_data_entities import DetectDataEntities +from .detect_request_deidentify_option import DetectRequestDeidentifyOption +from .v_1_advanced_options import V1AdvancedOptions + + +class V1DetectTextRequest(UniversalBaseModel): + """ + Request to deidentify a string. + """ + + text: str = pydantic.Field() + """ + Data to deidentify. + """ + + vault_id: str = pydantic.Field() + """ + ID of the vault. + """ + + session_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Will give a handle to delete the tokens generated during a specific interaction. + """ + + restrict_entity_types: typing.Optional[typing.List[DetectDataEntities]] = pydantic.Field(default=None) + """ + Entities to detect and deidentify. + """ + + deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = None + allow_regex: typing.Optional[typing.List[str]] = pydantic.Field(default=None) + """ + Regular expressions to ignore when detecting entities. + """ + + restrict_regex: typing.Optional[typing.List[str]] = pydantic.Field(default=None) + """ + Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. + """ + + return_entities: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, returns the details for the detected entities. + """ + + accuracy: typing.Optional[DetectDataAccuracy] = None + advanced_options: typing.Optional[V1AdvancedOptions] = None + store_entities: typing.Optional[bool] = pydantic.Field(default=None) + """ + Indicates whether entities should be stored in the vault. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_text_response.py b/skyflow/generated/rest/types/v_1_detect_text_response.py new file mode 100644 index 00000000..954e7d07 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_detect_text_response.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_response_entities import V1ResponseEntities + + +class V1DetectTextResponse(UniversalBaseModel): + """ + Response to deidentify a string. + """ + + processed_text: typing.Optional[str] = pydantic.Field(default=None) + """ + Deidentified text. If the input was a file, text that was extracted or transcribed from the file and deidentified. + """ + + entities: typing.Optional[typing.List[V1ResponseEntities]] = pydantic.Field(default=None) + """ + Detected entities. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_file_data_format.py b/skyflow/generated/rest/types/v_1_file_data_format.py new file mode 100644 index 00000000..f717c793 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_file_data_format.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +V1FileDataFormat = typing.Union[ + typing.Literal[ + "bmp", + "csv", + "doc", + "docx", + "jpeg", + "jpg", + "json", + "mp3", + "pdf", + "png", + "ppt", + "pptx", + "tif", + "tiff", + "txt", + "unknown", + "wav", + "xls", + "xlsx", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/v_1_image_options.py b/skyflow/generated/rest/types/v_1_image_options.py new file mode 100644 index 00000000..7f4143df --- /dev/null +++ b/skyflow/generated/rest/types/v_1_image_options.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class V1ImageOptions(UniversalBaseModel): + """ + How to handle image files. + """ + + output_processed_image: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, includes processed image in the output. + """ + + output_ocr_text: typing.Optional[bool] = pydantic.Field(default=None) + """ + If `true`, includes OCR text output in the response. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_locations.py b/skyflow/generated/rest/types/v_1_locations.py new file mode 100644 index 00000000..098d6b6e --- /dev/null +++ b/skyflow/generated/rest/types/v_1_locations.py @@ -0,0 +1,41 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class V1Locations(UniversalBaseModel): + """ + Locations of an entity in the text. + """ + + start_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the original text. + """ + + end_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the original text. + """ + + start_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the processed text. + """ + + end_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the processed text. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_pdf_config.py b/skyflow/generated/rest/types/v_1_pdf_config.py new file mode 100644 index 00000000..eff6107e --- /dev/null +++ b/skyflow/generated/rest/types/v_1_pdf_config.py @@ -0,0 +1,24 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_pdf_options import V1PdfOptions + + +class V1PdfConfig(UniversalBaseModel): + """ + How to handle PDF files. + """ + + options: typing.Optional[V1PdfOptions] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_pdf_options.py b/skyflow/generated/rest/types/v_1_pdf_options.py new file mode 100644 index 00000000..28fdf1bc --- /dev/null +++ b/skyflow/generated/rest/types/v_1_pdf_options.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class V1PdfOptions(UniversalBaseModel): + """ + How to handle PDF files. + """ + + density: typing.Optional[int] = pydantic.Field(default=None) + """ + Pixel density at which to process the PDF file. + """ + + max_resolution: typing.Optional[int] = pydantic.Field(default=None) + """ + Max resolution at which to process the PDF file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_processed_file_output.py b/skyflow/generated/rest/types/v_1_processed_file_output.py new file mode 100644 index 00000000..80968814 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_processed_file_output.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .detect_file_request_data_type import DetectFileRequestDataType +from .processed_file_output_processed_file_type import ProcessedFileOutputProcessedFileType + + +class V1ProcessedFileOutput(UniversalBaseModel): + """ + Contains details and contents of the processed file. + """ + + output_type: typing.Optional[DetectFileRequestDataType] = None + processed_file: typing.Optional[str] = pydantic.Field(default=None) + """ + URL or base64-encoded data of the output. 
+ """ + + processed_file_type: typing.Optional[ProcessedFileOutputProcessedFileType] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_response_entities.py b/skyflow/generated/rest/types/v_1_response_entities.py new file mode 100644 index 00000000..fbc0ecf4 --- /dev/null +++ b/skyflow/generated/rest/types/v_1_response_entities.py @@ -0,0 +1,43 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .v_1_locations import V1Locations + + +class V1ResponseEntities(UniversalBaseModel): + """ + Detected entities. + """ + + processed_text: typing.Optional[str] = pydantic.Field(default=None) + """ + Processed text of the entity. + """ + + original_text: typing.Optional[str] = pydantic.Field(default=None) + """ + Original text of the entity. + """ + + location: typing.Optional[V1Locations] = None + best_label: typing.Optional[str] = pydantic.Field(default=None) + """ + Highest rated label. + """ + + labels: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) + """ + Labels and their scores. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/vault_id.py b/skyflow/generated/rest/types/vault_id.py new file mode 100644 index 00000000..02ae7d21 --- /dev/null +++ b/skyflow/generated/rest/types/vault_id.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +VaultId = str diff --git a/skyflow/generated/rest/version.py b/skyflow/generated/rest/version.py index 5a6bc65e..e5d18b20 100644 --- a/skyflow/generated/rest/version.py +++ b/skyflow/generated/rest/version.py @@ -1 +1 @@ -__version__ = "2.0.0" \ No newline at end of file +__version__ = "2.0.9" diff --git a/skyflow/utils/__init__.py b/skyflow/utils/__init__.py index d5b072f4..67905512 100644 --- a/skyflow/utils/__init__.py +++ b/skyflow/utils/__init__.py @@ -2,4 +2,4 @@ from ._skyflow_messages import SkyflowMessages from ._version import SDK_VERSION from ._helpers import get_base_url, format_scope -from ._utils import get_credentials, get_vault_url, construct_invoke_connection_request, get_metrics, parse_insert_response, handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_get_response, parse_invoke_connection_response, validate_api_key, encode_column_values +from ._utils import get_credentials, get_vault_url, construct_invoke_connection_request, get_metrics, parse_insert_response, handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_get_response, parse_invoke_connection_response, validate_api_key, encode_column_values, parse_deidentify_text_response, parse_reidentify_text_response, convert_detected_entity_to_entity_info diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index 4bc95354..a5b94451 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -71,6 +71,15 @@ class Error(Enum): RESPONSE_NOT_JSON = f"{error_prefix} Response {{}} is not valid JSON." API_ERROR = f"{error_prefix} Server returned status code {{}}" + INVALID_FILE_INPUT = f"{error_prefix} Validation error. Invalid file input. Specify a valid file input." + INVALID_DETECT_ENTITIES_TYPE = f"{error_prefix} Validation error. Invalid type of detect entities. Specify detect entities as list of DetectEntities enum." + INVALID_TYPE_FOR_DEFAULT_TOKEN_TYPE = f"{error_prefix} Validation error. Invalid type of default token type. Specify default token type as TokenType enum." + INVALID_TOKEN_TYPE_VALUE = f"{error_prefix} Validation error. Invalid value for token type {{}}. Specify as list of DetectEntities enum." + INVALID_MAXIMUM_RESOLUTION = f"{error_prefix} Validation error. Invalid type of maximum resolution. Specify maximum resolution as a number." + INVALID_OUTPUT_DIRECTORY_VALUE = f"{error_prefix} Validation error. Invalid type of output directory. Specify output directory as a string." + WAIT_TIME_GREATER_THEN_64 = f"{error_prefix} Validation error. Invalid wait time. The waitTime value must be between 0 and 64 seconds." + OUTPUT_DIRECTORY_NOT_FOUND = f"{error_prefix} Validation error. Invalid output directory. Directory {{}} not found." + MISSING_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. Table name cannot be empty in insert request. Specify a table name." INVALID_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. Invalid table name in insert request. Specify a valid table name." INVALID_TYPE_OF_DATA_IN_INSERT = f"{error_prefix} Validation error. Invalid type of data in insert request. Specify data as a object array." @@ -147,6 +156,57 @@ class Error(Enum): FILE_INVALID_JSON = f"{error_prefix} Initialization failed. File at {{}} is not in valid JSON format. Verify the file contents." 
INVALID_JSON_FORMAT_IN_CREDENTIALS_ENV = f"{error_prefix} Validation error. Invalid JSON format in SKYFLOW_CREDENTIALS environment variable." + INVALID_TEXT_IN_DEIDENTIFY= f"{error_prefix} Validation error. The text field is required and must be a non-empty string. Specify a valid text." + INVALID_ENTITIES_IN_DEIDENTIFY= f"{error_prefix} Validation error. The entities field must be an array of DetectEntities enums. Specify a valid entities." + INVALID_ALLOW_REGEX_LIST= f"{error_prefix} Validation error. The allowRegexList field must be an array of strings. Specify a valid allow_regex_list." + INVALID_RESTRICT_REGEX_LIST= f"{error_prefix} Validation error. The restrictRegexList field must be an array of strings. Specify a valid restrict_regex_list." + INVALID_TOKEN_FORMAT= f"{error_prefix} Validation error. The tokenFormat key must be an instance of TokenFormat. Specify a valid token format." + INVALID_TRANSFORMATIONS= f"{error_prefix} Validation error. The transformations key must be an instance of Transformations. Specify a valid transformations." + + INVALID_TEXT_IN_REIDENTIFY= f"{error_prefix} Validation error. The text field is required and must be a non-empty string. Specify a valid text." + INVALID_REDACTED_ENTITIES_IN_REIDENTIFY= f"{error_prefix} Validation error. The redactedEntities field must be an array of DetectEntities enums. Specify a valid redactedEntities." + INVALID_MASKED_ENTITIES_IN_REIDENTIFY= f"{error_prefix} Validation error. The maskedEntities field must be an array of DetectEntities enums. Specify a valid maskedEntities." + INVALID_PLAIN_TEXT_ENTITIES_IN_REIDENTIFY= f"{error_prefix} Validation error. The plainTextEntities field must be an array of DetectEntities enums. Specify a valid plainTextEntities." + + INVALID_DEIDENTIFY_FILE_REQUEST= f"{error_prefix} Validation error. Invalid deidentify file request. Specify a valid deidentify file request." + EMPTY_FILE_OBJECT= f"{error_prefix} Validation error. File object cannot be empty. Specify a valid file object." + INVALID_FILE_FORMAT= f"{error_prefix} Validation error. Invalid file format. Specify a valid file format." + MISSING_FILE_SOURCE= f"{error_prefix} Validation error. Provide exactly one of filePath, base64, or fileObject." + INVALID_FILE_OBJECT= f"{error_prefix} Validation error. Invalid file object. Specify a valid file object." + INVALID_BASE64_STRING= f"{error_prefix} Validation error. Invalid base64 string. Specify a valid base64 string." + INVALID_DEIDENTIFY_FILE_OPTIONS= f"{error_prefix} Validation error. Invalid deidentify file options. Specify a valid deidentify file options." + INVALID_ENTITIES= f"{error_prefix} Validation error. Invalid entities. Specify valid entities as string array." + EMPTY_ENTITIES= f"{error_prefix} Validation error. Entities cannot be empty. Specify valid entities." + EMPTY_ALLOW_REGEX_LIST= f"{error_prefix} Validation error. Allow regex list cannot be empty. Specify valid allow regex list." + INVALID_ALLOW_REGEX= f"{error_prefix} Validation error. Invalid allow regex. Specify valid allow regex at index {{}}." + EMPTY_RESTRICT_REGEX_LIST= f"{error_prefix} Validation error. Restrict regex list cannot be empty. Specify valid restrict regex list." + INVALID_RESTRICT_REGEX= f"{error_prefix} Validation error. Invalid restrict regex. Specify valid restrict regex at index {{}}." + INVALID_OUTPUT_PROCESSED_IMAGE= f"{error_prefix} Validation error. Invalid output processed image. Specify valid output processed image as boolean." 
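+    # The detect validation messages above and below back the new deidentify/reidentify
+    # validators added later in this patch; the {{}} placeholders in these f-string
+    # templates render as {} and are filled via str.format() when the SkyflowError is
+    # raised (for example, OUTPUT_DIRECTORY_NOT_FOUND).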
+ INVALID_OUTPUT_OCR_TEXT= f"{error_prefix} Validation error. Invalid output ocr text. Specify valid output ocr text as boolean." + INVALID_MASKING_METHOD= f"{error_prefix} Validation error. Invalid masking method. Specify valid masking method as MaskingMethod enum." + INVALID_PIXEL_DENSITY= f"{error_prefix} Validation error. Invalid pixel density. Specify valid pixel density as number." + INVALID_OUTPUT_TRANSCRIPTION= f"{error_prefix} Validation error. Invalid output transcription. Specify valid output transcription as DetectOutputTranscriptions enum." + INVALID_BLEEP_TYPE= f"{error_prefix} Validation error. Invalid type of bleep. Specify bleep as Bleep object." + INVALID_BLEEP_GAIN= f"{error_prefix} Validation error. Invalid bleep gain. Specify valid bleep gain as a number." + INVALID_BLEEP_FREQUENCY= f"{error_prefix} Validation error. Invalid bleep frequency. Specify valid bleep frequency as a number." + INVALID_BLEEP_START_PADDING= f"{error_prefix} Validation error. Invalid bleep start padding. Specify valid bleep start padding as a number." + INVALID_BLEEP_STOP_PADDING= f"{error_prefix} Validation error. Invalid bleep stop padding. Specify valid bleep stop padding as a number." + INVALID_OUTPUT_PROCESSED_AUDIO= f"{error_prefix} Validation error. Invalid output processed audio. Specify valid output processed audio as boolean." + INVALID_MAX_RESOLUTION= f"{error_prefix} Validation error. Invalid max resolution. Specify valid max resolution as string." + INVALID_BLEEP= f"{error_prefix} Validation error. Invalid bleep. Specify valid bleep as object." + INVALID_FILE_OR_ENCODED_FILE= f"{error_prefix} . Error while decoding base64 and saving file" + INVALID_FILE_TYPE = f"{error_prefix} Validation error. Invalid file type. Specify a valid file type." + INVALID_FILE_NAME= f"{error_prefix} Validation error. Invalid file name. Specify a valid file name." + FILE_READ_ERROR= f"{error_prefix} Validation error. Unable to read file. Verify the file path." + INVALID_BASE64_HEADER= f"{error_prefix} Validation error. Invalid base64 header. Specify a valid base64 header." + INVALID_WAIT_TIME= f"{error_prefix} Validation error. Invalid wait time. Specify a valid wait time as number and should not be greater than 64 secs." + INVALID_OUTPUT_DIRECTORY= f"{error_prefix} Validation error. Invalid output directory. Specify a valid output directory as string." + INVALID_OUTPUT_DIRECTORY_PATH= f"{error_prefix} Validation error. Invalid output directory path. Specify a valid output directory path as string." + EMPTY_RUN_ID= f"{error_prefix} Validation error. Run id cannot be empty. Specify a valid run id." + INVALID_RUN_ID= f"{error_prefix} Validation error. Invalid run id. Specify a valid run id as string." + INTERNAL_SERVER_ERROR= f"{error_prefix}. Internal server error. {{}}." + GET_DETECT_RUN_FAILED = f"{error_prefix} Get detect run operation failed." + class Info(Enum): CLIENT_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized skyflow client." VALIDATING_VAULT_CONFIG = f"{INFO}: [{error_prefix}] Validating vault config." @@ -154,6 +214,7 @@ class Info(Enum): UNABLE_TO_GENERATE_SDK_METRIC = f"{INFO}: [{error_prefix}] Unable to generate {{}} metric." VAULT_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized vault controller with vault ID {{}}." CONNECTION_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized connection controller with connection ID {{}}." + DETECT_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized detect controller with vault ID {{}}." 
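+    # Mirrors VAULT_CONTROLLER_INITIALIZED and CONNECTION_CONTROLLER_INITIALIZED above
+    # for the new Detect controller introduced in this patch.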
VAULT_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} already exists." VAULT_CONFIG_DOES_NOT_EXIST = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} doesn't exist." CONNECTION_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Connection config with connection ID {{}} already exists." @@ -166,9 +227,13 @@ class Info(Enum): GET_BEARER_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Bearer token generated." GET_SIGNED_DATA_TOKENS_TRIGGERED = f"{INFO}: [{error_prefix}] generate_signed_data_tokens method triggered." GET_SIGNED_DATA_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Signed data tokens generated." - GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED = f"{INFO}: [{error_prefix}] generate bearer_token_from_credential_string method triggered ." + GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED = f"{INFO}: [{error_prefix}] generate bearer_token_from_credential_string method triggered." REUSE_BEARER_TOKEN = f"{INFO}: [{error_prefix}] Reusing bearer token." + VALIDATE_DEIDENTIFY_FILE_REQUEST = f"{INFO}: [{error_prefix}] Validating deidentify file request." + DETECT_FILE_TRIGGERED = f"{INFO}: [{error_prefix}] Detect file method triggered." + DETECT_FILE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Deidentify file request resolved." + DETECT_FILE_SUCCESS = f"{INFO}: [{error_prefix}] File deidentified." VALIDATE_INSERT_REQUEST = f"{INFO}: [{error_prefix}] Validating insert request." INSERT_TRIGGERED = f"{INFO}: [{error_prefix}] Insert method triggered." @@ -210,6 +275,28 @@ class Info(Enum): INVOKE_CONNECTION_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Invoke connection request resolved." INVOKE_CONNECTION_SUCCESS = f"{INFO}: [{error_prefix}] Invoke Connection Success." + DEIDENTIFY_TEXT_TRIGGERED = f"{INFO}: [{error_prefix}] Deidentify text method triggered." + VALIDATING_DEIDENTIFY_TEXT_INPUT = f"{INFO}: [{error_prefix}] Validating deidentify text input." + DEIDENTIFY_TEXT_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Deidentify text request is resolved." + DEIDENTIFY_TEXT_SUCCESS = f"{INFO}: [{error_prefix}] Data deidentified." + + REIDENTIFY_TEXT_TRIGGERED = f"{INFO}: [{error_prefix}] Reidentify text method triggered." + VALIDATING_REIDENTIFY_TEXT_INPUT = f"{INFO}: [{error_prefix}] Validating reidentify text input." + REIDENTIFY_TEXT_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Reidentify text request is resolved." + REIDENTIFY_TEXT_SUCCESS = f"{INFO}: [{error_prefix}] Data reidentified." + + DEIDENTIFY_FILE_TRIGGERED = f"{INFO}: [{error_prefix}] Deidentify file triggered." + VALIDATING_DETECT_FILE_INPUT = f"{INFO}: [{error_prefix}] Validating deidentify file input." + DEIDENTIFY_FILE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Deidentify file request is resolved." + DEIDENTIFY_FILE_SUCCESS = f"{INFO}: [{error_prefix}] File deidentified." + + GET_DETECT_RUN_TRIGGERED = f"{INFO}: [{error_prefix}] Get detect run triggered." + VALIDATING_GET_DETECT_RUN_INPUT = f"{INFO}: [{error_prefix}] Validating get detect run input." + GET_DETECT_RUN_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Get detect run request is resolved." + GET_DETECT_RUN_SUCCESS = f"{INFO}: [{error_prefix}] Get detect run success." + + DETECT_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Detect request is resolved." + class ErrorLogs(Enum): VAULTID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID is required." EMPTY_VAULTID = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID can not be empty." 
@@ -280,6 +367,14 @@ class ErrorLogs(Enum): QUERY_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Query request resulted in failure." GET_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Get request resulted in failure." INVOKE_CONNECTION_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Invoke connection request resulted in failure." + + EMPTY_RUN_ID = f"{ERROR}: [{error_prefix}] Validation error. Run id cannot be empty. Specify a valid run id." + INVALID_RUN_ID = f"{ERROR}: [{error_prefix}] Validation error. Invalid run id. Specify a valid run id as string." + DEIDENTIFY_FILE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify file resulted in failure." + DETECT_RUN_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Detect get run resulted in failure." + DEIDENTIFY_TEXT_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify text resulted in failure." + REIDENTIFY_TEXT_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Reidentify text resulted in failure." + DETECT_FILE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify file resulted in failure." class Interface(Enum): INSERT = "INSERT" diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 4dbd32a3..6b013a85 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -13,9 +13,12 @@ from urllib.parse import quote from skyflow.error import SkyflowError from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ - V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse + V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse, \ + DeidentifyStringResponse, ReidentifyStringResponse, ErrorResponse from skyflow.generated.rest.core.http_response import HttpResponse from skyflow.utils.logger import log_error_log +from skyflow.vault.detect import DeidentifyTextResponse, ReidentifyTextResponse +from skyflow.vault.detect import EntityInfo, TextIndex from . 
import SkyflowMessages, SDK_VERSION from .constants import PROTOCOL from .enums import Env, ContentType, EnvUrls @@ -82,6 +85,22 @@ def to_lowercase_keys(dict): return result +def convert_detected_entity_to_entity_info(detected_entity): + return EntityInfo( + token=detected_entity.token, + value=detected_entity.value, + text_index=TextIndex( + start=detected_entity.location.start_index, + end=detected_entity.location.end_index + ), + processed_index=TextIndex( + start=detected_entity.location.start_index_processed, + end=detected_entity.location.end_index_processed + ), + entity=detected_entity.entity_type, + scores=detected_entity.entity_scores + ) + def construct_invoke_connection_request(request, connection_url, logger) -> PreparedRequest: url = parse_path_params(connection_url.rstrip('/'), request.path_params) @@ -364,6 +383,18 @@ def parse_invoke_connection_response(api_response: requests.Response): message = SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content) raise SkyflowError(message, status_code) +def parse_deidentify_text_response(api_response: DeidentifyStringResponse): + entities = [convert_detected_entity_to_entity_info(entity) for entity in api_response.entities] + return DeidentifyTextResponse( + processed_text=api_response.processed_text, + entities=entities, + word_count=api_response.word_count, + char_count=api_response.character_count + ) + +def parse_reidentify_text_response(api_response: ReidentifyStringResponse): + return ReidentifyTextResponse(api_response.text) + def log_and_reject_error(description, status_code, request_id, http_status=None, grpc_code=None, details=None, logger = None): raise SkyflowError(description, status_code, request_id, grpc_code, http_status, details) @@ -390,6 +421,8 @@ def handle_json_error(err, data, request_id, logger): try: if isinstance(data, dict): # If data is already a dict description = data + elif isinstance(data, ErrorResponse): + description = data.dict() else: description = json.loads(data) status_code = description.get('error', {}).get('http_code', 500) # Default to 500 if not found diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 22cfc2df..0cd8592c 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b3' \ No newline at end of file +SDK_VERSION = '2.0.0b6' \ No newline at end of file diff --git a/skyflow/utils/enums/__init__.py b/skyflow/utils/enums/__init__.py index 5456737b..af293ce2 100644 --- a/skyflow/utils/enums/__init__.py +++ b/skyflow/utils/enums/__init__.py @@ -1,6 +1,12 @@ from .env import Env, EnvUrls from .log_level import LogLevel from .content_types import ContentType +from .detect_entities import DetectEntities from .token_mode import TokenMode +from .token_type import TokenType from .request_method import RequestMethod -from .redaction_type import RedactionType \ No newline at end of file +from .redaction_type import RedactionType +from .detect_entities import DetectEntities +from .detect_output_transcriptions import DetectOutputTranscriptions +from .masking_method import MaskingMethod +from .token_type import TokenType \ No newline at end of file diff --git a/skyflow/utils/enums/detect_entities.py b/skyflow/utils/enums/detect_entities.py new file mode 100644 index 00000000..23b36bdc --- /dev/null +++ b/skyflow/utils/enums/detect_entities.py @@ -0,0 +1,66 @@ +from enum import Enum + +class DetectEntities(Enum): + ACCOUNT_NUMBER = "account_number" + AGE = "age" + ALL = "all" + BANK_ACCOUNT = "bank_account" + BLOOD_TYPE = 
"blood_type" + CONDITION = "condition" + CORPORATE_ACTION = "corporate_action" + CREDIT_CARD = "credit_card" + CREDIT_CARD_EXPIRATION = "credit_card_expiration" + CVV = "cvv" + DATE = "date" + DATE_INTERVAL = "date_interval" + DOB = "dob" + DOSE = "dose" + DRIVER_LICENSE = "driver_license" + DRUG = "drug" + DURATION = "duration" + EMAIL_ADDRESS = "email_address" + EVENT = "event" + FILENAME = "filename" + FINANCIAL_METRIC = "financial_metric" + GENDER_SEXUALITY = "gender_sexuality" + HEALTHCARE_NUMBER = "healthcare_number" + INJURY = "injury" + IP_ADDRESS = "ip_address" + LANGUAGE = "language" + LOCATION = "location" + LOCATION_ADDRESS = "location_address" + LOCATION_ADDRESS_STREET = "location_address_street" + LOCATION_CITY = "location_city" + LOCATION_COORDINATE = "location_coordinate" + LOCATION_COUNTRY = "location_country" + LOCATION_STATE = "location_state" + LOCATION_ZIP = "location_zip" + MARITAL_STATUS = "marital_status" + MEDICAL_CODE = "medical_code" + MEDICAL_PROCESS = "medical_process" + MONEY = "money" + NAME = "name" + NAME_FAMILY = "name_family" + NAME_GIVEN = "name_given" + NAME_MEDICAL_PROFESSIONAL = "name_medical_professional" + NUMERICAL_PII = "numerical_pii" + OCCUPATION = "occupation" + ORGANIZATION = "organization" + ORGANIZATION_MEDICAL_FACILITY = "organization_medical_facility" + ORIGIN = "origin" + PASSPORT_NUMBER = "passport_number" + PASSWORD = "password" + PHONE_NUMBER = "phone_number" + PHYSICAL_ATTRIBUTE = "physical_attribute" + POLITICAL_AFFILIATION = "political_affiliation" + PRODUCT = "product" + RELIGION = "religion" + ROUTING_NUMBER = "routing_number" + SSN = "ssn" + STATISTICS = "statistics" + TIME = "time" + TREND = "trend" + URL = "url" + USERNAME = "username" + VEHICLE_ID = "vehicle_id" + ZODIAC_SIGN = "zodiac_sign" \ No newline at end of file diff --git a/skyflow/utils/enums/detect_output_transcriptions.py b/skyflow/utils/enums/detect_output_transcriptions.py new file mode 100644 index 00000000..69f94d79 --- /dev/null +++ b/skyflow/utils/enums/detect_output_transcriptions.py @@ -0,0 +1,8 @@ +from enum import Enum + +class DetectOutputTranscriptions(Enum): + DIARIZED_TRANSCRIPTION = "diarized_transcription" + MEDICAL_DIARIZED_TRANSCRIPTION = "medical_diarized_transcription" + MEDICAL_TRANSCRIPTION = "medical_transcription" + PLAINTEXT_TRANSCRIPTION = "plaintext_transcription" + TRANSCRIPTION = "transcription" \ No newline at end of file diff --git a/skyflow/utils/enums/masking_method.py b/skyflow/utils/enums/masking_method.py new file mode 100644 index 00000000..a322f35f --- /dev/null +++ b/skyflow/utils/enums/masking_method.py @@ -0,0 +1,5 @@ +from enum import Enum + +class MaskingMethod(Enum): + BLACKBOX= "blackbox" + BLUR= "blur" \ No newline at end of file diff --git a/skyflow/utils/enums/token_type.py b/skyflow/utils/enums/token_type.py new file mode 100644 index 00000000..9e9e5fcf --- /dev/null +++ b/skyflow/utils/enums/token_type.py @@ -0,0 +1,6 @@ +from enum import Enum + +class TokenType(Enum): + VAULT_TOKEN = "vault_token" + ENTITY_UNIQUE_COUNTER = "entity_unq_counter" + ENTITY_ONLY = "entity_only" diff --git a/skyflow/utils/validations/__init__.py b/skyflow/utils/validations/__init__.py index 17bc49a7..b8ce13c8 100644 --- a/skyflow/utils/validations/__init__.py +++ b/skyflow/utils/validations/__init__.py @@ -13,4 +13,8 @@ validate_detokenize_request, validate_tokenize_request, validate_invoke_connection_params, + validate_deidentify_text_request, + validate_reidentify_text_request, + validate_deidentify_file_request, + 
validate_get_detect_run_request, ) \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index 93d10468..0ff9f038 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -1,9 +1,14 @@ import json +import os +from skyflow.generated.rest import TokenType from skyflow.service_account import is_expired -from skyflow.utils.enums import LogLevel, Env, RedactionType, TokenMode +from skyflow.utils.enums import LogLevel, Env, RedactionType, TokenMode, DetectEntities, DetectOutputTranscriptions, \ + MaskingMethod from skyflow.error import SkyflowError from skyflow.utils import SkyflowMessages from skyflow.utils.logger import log_info, log_error_log +from skyflow.vault.detect import DeidentifyTextRequest, ReidentifyTextRequest, TokenFormat, Transformations, \ + GetDetectRunRequest, Bleep, DeidentifyFileRequest valid_vault_config_keys = ["vault_id", "cluster_id", "credentials", "env"] valid_connection_config_keys = ["connection_id", "connection_url", "credentials"] @@ -252,6 +257,106 @@ def validate_update_connection_config(logger, config): return True +def validate_deidentify_file_request(logger, request: DeidentifyFileRequest): + if not hasattr(request, 'file') or request.file is None: + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_INPUT.value, invalid_input_error_code) + + # Optional: entities + if hasattr(request, 'entities') and request.entities is not None: + if not isinstance(request.entities, list): + raise SkyflowError(SkyflowMessages.Error.INVALID_DETECT_ENTITIES_TYPE.value, invalid_input_error_code) + + if not all(isinstance(entity, DetectEntities) for entity in request.entities): + raise SkyflowError(SkyflowMessages.Error.INVALID_DETECT_ENTITIES_TYPE.value, invalid_input_error_code) + + # Optional: allow_regex_list + if hasattr(request, 'allow_regex_list') and request.allow_regex_list is not None: + if not isinstance(request.allow_regex_list, list) or not all(isinstance(x, str) for x in request.allow_regex_list): + raise SkyflowError(SkyflowMessages.Error.INVALID_ALLOW_REGEX_LIST.value, invalid_input_error_code) + + # Optional: restrict_regex_list + if hasattr(request, 'restrict_regex_list') and request.restrict_regex_list is not None: + if not isinstance(request.restrict_regex_list, list) or not all(isinstance(x, str) for x in request.restrict_regex_list): + raise SkyflowError(SkyflowMessages.Error.INVALID_RESTRICT_REGEX_LIST.value, invalid_input_error_code) + + # Optional: token_format + if request.token_format is not None and not isinstance(request.token_format, TokenFormat): + raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_FORMAT.value, invalid_input_error_code) + + # Optional: transformations + if request.transformations is not None and not isinstance(request.transformations, Transformations): + raise SkyflowError(SkyflowMessages.Error.INVALID_TRANSFORMATIONS.value, invalid_input_error_code) + + # Optional: output_processed_image + if hasattr(request, 'output_processed_image') and request.output_processed_image is not None: + if not isinstance(request.output_processed_image, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_OUTPUT_PROCESSED_IMAGE.value, invalid_input_error_code) + + # Optional: output_ocr_text + if hasattr(request, 'output_ocr_text') and request.output_ocr_text is not None: + if not isinstance(request.output_ocr_text, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_OUTPUT_OCR_TEXT.value, invalid_input_error_code) 
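+    # The image-, audio-, and output-related fields below are all optional: each one is
+    # type-checked only when present, and the first mismatch raises a SkyflowError with
+    # invalid_input_error_code.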
+
+    # Optional: masking_method
+    if hasattr(request, 'masking_method') and request.masking_method is not None:
+        if not isinstance(request.masking_method, MaskingMethod):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_MASKING_METHOD.value, invalid_input_error_code)
+
+    # Optional: pixel_density
+    if hasattr(request, 'pixel_density') and request.pixel_density is not None:
+        if not isinstance(request.pixel_density, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_PIXEL_DENSITY.value, invalid_input_error_code)
+
+    # Optional: max_resolution
+    if hasattr(request, 'max_resolution') and request.max_resolution is not None:
+        if not isinstance(request.max_resolution, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_MAXIMUM_RESOLUTION.value, invalid_input_error_code)
+
+    # Optional: output_processed_audio
+    if hasattr(request, 'output_processed_audio') and request.output_processed_audio is not None:
+        if not isinstance(request.output_processed_audio, bool):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_OUTPUT_PROCESSED_AUDIO.value, invalid_input_error_code)
+
+    # Optional: output_transcription
+    if hasattr(request, 'output_transcription') and request.output_transcription is not None:
+        if not isinstance(request.output_transcription, DetectOutputTranscriptions):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_OUTPUT_TRANSCRIPTION.value, invalid_input_error_code)
+
+    # Optional: bleep
+    if hasattr(request, 'bleep') and request.bleep is not None:
+        if not isinstance(request.bleep, Bleep):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_BLEEP_TYPE.value, invalid_input_error_code)
+
+        # Validate gain
+        if request.bleep.gain is not None and not isinstance(request.bleep.gain, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_BLEEP_GAIN.value, invalid_input_error_code)
+
+        # Validate frequency
+        if request.bleep.frequency is not None and not isinstance(request.bleep.frequency, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_BLEEP_FREQUENCY.value, invalid_input_error_code)
+
+        # Validate start_padding
+        if request.bleep.start_padding is not None and not isinstance(request.bleep.start_padding, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_BLEEP_START_PADDING.value, invalid_input_error_code)
+
+        # Validate stop_padding
+        if request.bleep.stop_padding is not None and not isinstance(request.bleep.stop_padding, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_BLEEP_STOP_PADDING.value, invalid_input_error_code)
+
+    # Optional: output_directory
+    if hasattr(request, 'output_directory') and request.output_directory is not None:
+        if not isinstance(request.output_directory, str):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_OUTPUT_DIRECTORY_VALUE.value, invalid_input_error_code)
+        if not os.path.isdir(request.output_directory):
+            raise SkyflowError(SkyflowMessages.Error.OUTPUT_DIRECTORY_NOT_FOUND.value.format(request.output_directory), invalid_input_error_code)
+
+    # Optional: wait_time
+    if hasattr(request, 'wait_time') and request.wait_time is not None:
+        if not isinstance(request.wait_time, (int, float)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_WAIT_TIME.value, invalid_input_error_code)
+        if request.wait_time < 0 or request.wait_time > 64:
+            raise SkyflowError(SkyflowMessages.Error.WAIT_TIME_GREATER_THEN_64.value, invalid_input_error_code)
 
 def validate_insert_request(logger, request):
     if not isinstance(request.table_name, str):
@@ -572,3 +677,48 @@
def validate_invoke_connection_params(logger, query_params, path_params):
         json.dumps(query_params)
     except TypeError:
         raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code)
+
+def validate_deidentify_text_request(logger, request: DeidentifyTextRequest):
+    if not request.text or not isinstance(request.text, str) or not request.text.strip():
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TEXT_IN_DEIDENTIFY.value, invalid_input_error_code)
+
+    # Validate entities if present
+    if request.entities is not None and not isinstance(request.entities, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_ENTITIES_IN_DEIDENTIFY.value, invalid_input_error_code)
+
+    # Validate allow_regex_list if present
+    if request.allow_regex_list is not None and not isinstance(request.allow_regex_list, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_ALLOW_REGEX_LIST.value, invalid_input_error_code)
+
+    # Validate restrict_regex_list if present
+    if request.restrict_regex_list is not None and not isinstance(request.restrict_regex_list, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RESTRICT_REGEX_LIST.value, invalid_input_error_code)
+
+    # Validate token_format if present
+    if request.token_format is not None and not isinstance(request.token_format, TokenFormat):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_FORMAT.value, invalid_input_error_code)
+
+    # Validate transformations if present
+    if request.transformations is not None and not isinstance(request.transformations, Transformations):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TRANSFORMATIONS.value, invalid_input_error_code)
+
+def validate_reidentify_text_request(logger, request: ReidentifyTextRequest):
+    if not request.text or not isinstance(request.text, str) or not request.text.strip():
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TEXT_IN_REIDENTIFY.value, invalid_input_error_code)
+
+    # Validate redacted_entities if present
+    if request.redacted_entities is not None and not isinstance(request.redacted_entities, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTED_ENTITIES_IN_REIDENTIFY.value, invalid_input_error_code)
+
+    # Validate masked_entities if present
+    if request.masked_entities is not None and not isinstance(request.masked_entities, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_MASKED_ENTITIES_IN_REIDENTIFY.value, invalid_input_error_code)
+
+    # Validate plain_text_entities if present
+    if request.plain_text_entities is not None and not isinstance(request.plain_text_entities, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_PLAIN_TEXT_ENTITIES_IN_REIDENTIFY.value, invalid_input_error_code)
+
+def validate_get_detect_run_request(logger, request: GetDetectRunRequest):
+    if not request.run_id or not isinstance(request.run_id, str) or not request.run_id.strip():
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RUN_ID.value, invalid_input_error_code)
+
diff --git a/skyflow/vault/client/client.py b/skyflow/vault/client/client.py
index e3e543ae..f47a525c 100644
--- a/skyflow/vault/client/client.py
+++ b/skyflow/vault/client/client.py
@@ -42,6 +42,12 @@ def get_tokens_api(self):
 
     def get_query_api(self):
         return self.__api_client.query
+
+    def get_detect_text_api(self):
+        return self.__api_client.strings
+
+    def get_detect_file_api(self):
+        return self.__api_client.files
 
     def get_vault_id(self):
         return self.__config.get("vault_id")
diff --git a/skyflow/vault/controller/__init__.py b/skyflow/vault/controller/__init__.py
index
14301fb3..681153c0 100644 --- a/skyflow/vault/controller/__init__.py +++ b/skyflow/vault/controller/__init__.py @@ -1,2 +1,3 @@ from ._vault import Vault -from ._connections import Connection \ No newline at end of file +from ._connections import Connection +from ._detect import Detect \ No newline at end of file diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py new file mode 100644 index 00000000..1dbd533c --- /dev/null +++ b/skyflow/vault/controller/_detect.py @@ -0,0 +1,410 @@ +import json +import os +from skyflow.error import SkyflowError +from skyflow.generated.rest.types.token_type import TokenType +from skyflow.generated.rest.types.transformations import Transformations +from skyflow.generated.rest.types.transformations_shift_dates import TransformationsShiftDates +import base64 +import time +from skyflow.generated.rest import DeidentifyTextRequestFile, DeidentifyAudioRequestFile, DeidentifyPdfRequestFile, \ + DeidentifyImageRequestFile, DeidentifyPresentationRequestFile, DeidentifySpreadsheetRequestFile, \ + DeidentifyDocumentRequestFile, DeidentifyFileRequestFile +from skyflow.utils._skyflow_messages import SkyflowMessages +from skyflow.utils._utils import get_metrics, handle_exception, parse_deidentify_text_response, parse_reidentify_text_response +from skyflow.utils.constants import SKY_META_DATA_HEADER +from skyflow.utils.logger import log_info, log_error_log +from skyflow.utils.validations import validate_deidentify_file_request, validate_get_detect_run_request +from skyflow.utils.validations._validations import validate_deidentify_text_request, validate_reidentify_text_request +from typing import Dict, Any +from skyflow.generated.rest.strings.types.reidentify_string_request_format import ReidentifyStringRequestFormat +from skyflow.vault.detect import DeidentifyTextRequest, DeidentifyTextResponse, ReidentifyTextRequest, \ + ReidentifyTextResponse, DeidentifyFileRequest, DeidentifyFileResponse, GetDetectRunRequest + +class Detect: + def __init__(self, vault_client): + self.__vault_client = vault_client + + def __initialize(self): + self.__vault_client.initialize_client_configuration() + + def __get_headers(self): + headers = { + SKY_META_DATA_HEADER: json.dumps(get_metrics()) + } + return headers + + def ___build_deidentify_text_body(self, request: DeidentifyTextRequest) -> Dict[str, Any]: + deidentify_text_body = {} + parsed_entity_types = request.entities + + deidentify_text_body['text'] = request.text + deidentify_text_body['entity_types'] = parsed_entity_types + deidentify_text_body['token_type'] = self.__get_token_format(request) + deidentify_text_body['allow_regex'] = request.allow_regex_list + deidentify_text_body['restrict_regex'] = request.restrict_regex_list + deidentify_text_body['transformations'] = self.__get_transformations(request) + + return deidentify_text_body + + def ___build_reidentify_text_body(self, request: ReidentifyTextRequest) -> Dict[str, Any]: + parsed_format = ReidentifyStringRequestFormat( + redacted=request.redacted_entities, + masked=request.masked_entities, + plaintext=request.plain_text_entities + ) + reidentify_text_body = {} + reidentify_text_body['text'] = request.text + reidentify_text_body['format'] = parsed_format + return reidentify_text_body + + def _get_file_extension(self, filename: str): + return filename.split('.')[-1].lower() if '.' 
in filename else '' + + def __poll_for_processed_file(self, run_id, max_wait_time=64): + max_wait_time = 64 if max_wait_time is None else max_wait_time + files_api = self.__vault_client.get_detect_file_api().with_raw_response + current_wait_time = 1 # Start with 1 second + try: + while True: + response = files_api.get_run(run_id, vault_id=self.__vault_client.get_vault_id(), request_options=self.__get_headers()).data + status = response.status + if status == 'IN_PROGRESS': + if current_wait_time >= max_wait_time: + return DeidentifyFileResponse(run_id=run_id, status='IN_PROGRESS') + else: + next_wait_time = current_wait_time * 2 + if next_wait_time >= max_wait_time: + wait_time = max_wait_time - current_wait_time + current_wait_time = max_wait_time + else: + wait_time = next_wait_time + current_wait_time = next_wait_time + time.sleep(wait_time) + elif status == 'SUCCESS' or status == 'FAILED': + return response + except Exception as e: + raise e + + def __parse_deidentify_file_response(self, data, run_id=None, status=None): + + output = getattr(data, "output", []) + output_type = getattr(data, "output_type", None) + word_character_count = getattr(data, "word_character_count", None) + size = getattr(data, "size", None) + duration = getattr(data, "duration", None) + pages = getattr(data, "pages", None) + slides = getattr(data, "slides", None) + message = getattr(data, "message", None) + status_val = getattr(data, "status", None) or status + run_id_val = getattr(data, "run_id", None) or run_id + + # Convert output to list of dicts if it's a list of objects + def output_to_dict_list(output): + result = [] + for o in output: + if isinstance(o, dict): + result.append({ + "file": o.get("processedFile") or o.get("processed_file"), + "type": o.get("processedFileType") or o.get("processed_file_type"), + "extension": o.get("processedFileExtension") or o.get("processed_file_extension") + }) + else: + result.append({ + "file": getattr(o, "processed_file", None), + "type": getattr(o, "processed_file_type", None), + "extension": getattr(o, "processed_file_extension", None) + }) + return result + + output_list = output_to_dict_list(output) + first_output = output_list[0] if output_list else {} + + entities = [o for o in output_list if o.get("type") == "entities"] + + word_count = getattr(word_character_count, "word_count", None) + char_count = getattr(word_character_count, "character_count", None) + + return DeidentifyFileResponse( + file=first_output.get("file", None), + type=first_output.get("type", None), + extension=first_output.get("extension", None), + word_count=word_count, + char_count=char_count, + size_in_kb=size, + duration_in_seconds=duration, + page_count=pages, + slide_count=slides, + entities=entities, + run_id=run_id_val, + status=status_val, + errors=[] + ) + + def __get_token_format(self, request): + if not hasattr(request, "token_format") or request.token_format is None: + return None + return { + 'default': getattr(request.token_format, "default", None), + 'entity_unq_counter': getattr(request.token_format, "entity_unique_counter", None), + 'entity_only': getattr(request.token_format, "entity_only", None), + 'vault_token': getattr(request.token_format, "vault_token", None) + } + + def __get_transformations(self, request): + if not hasattr(request, "transformations") or request.transformations is None: + return None + shift_dates = getattr(request.transformations, "shift_dates", None) + if shift_dates is None: + return None + return { + 'shift_dates': { + 'max_days': getattr(shift_dates, 
"max", None), + 'min_days': getattr(shift_dates, "min", None), + 'entity_types': getattr(shift_dates, "entities", None) + } + } + + def deidentify_text(self, request: DeidentifyTextRequest) -> DeidentifyTextResponse: + log_info(SkyflowMessages.Info.VALIDATING_DEIDENTIFY_TEXT_INPUT.value, self.__vault_client.get_logger()) + validate_deidentify_text_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DEIDENTIFY_TEXT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + detect_api = self.__vault_client.get_detect_text_api() + deidentify_text_body = self.___build_deidentify_text_body(request) + + try: + log_info(SkyflowMessages.Info.DEIDENTIFY_TEXT_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = detect_api.deidentify_string( + vault_id=self.__vault_client.get_vault_id(), + text=deidentify_text_body['text'], + entity_types=deidentify_text_body['entity_types'], + allow_regex=deidentify_text_body['allow_regex'], + restrict_regex=deidentify_text_body['restrict_regex'], + token_type=deidentify_text_body['token_type'], + transformations=deidentify_text_body['transformations'], + request_options=self.__get_headers() + ) + deidentify_text_response = parse_deidentify_text_response(api_response) + log_info(SkyflowMessages.Info.DEIDENTIFY_TEXT_SUCCESS.value, self.__vault_client.get_logger()) + return deidentify_text_response + + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DEIDENTIFY_TEXT_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def reidentify_text(self, request: ReidentifyTextRequest) -> ReidentifyTextResponse: + log_info(SkyflowMessages.Info.VALIDATING_REIDENTIFY_TEXT_INPUT.value, self.__vault_client.get_logger()) + validate_reidentify_text_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.REIDENTIFY_TEXT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + detect_api = self.__vault_client.get_detect_text_api() + reidentify_text_body = self.___build_reidentify_text_body(request) + + try: + log_info(SkyflowMessages.Info.REIDENTIFY_TEXT_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = detect_api.reidentify_string( + vault_id=self.__vault_client.get_vault_id(), + text=reidentify_text_body['text'], + format=reidentify_text_body['format'], + request_options=self.__get_headers() + ) + reidentify_text_response = parse_reidentify_text_response(api_response) + log_info(SkyflowMessages.Info.REIDENTIFY_TEXT_SUCCESS.value, self.__vault_client.get_logger()) + return reidentify_text_response + + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.REIDENTIFY_TEXT_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def deidentify_file(self, request: DeidentifyFileRequest): + log_info(SkyflowMessages.Info.DETECT_FILE_TRIGGERED.value, self.__vault_client.get_logger()) + validate_deidentify_file_request(self.__vault_client.get_logger(), request) + self.__initialize() + files_api = self.__vault_client.get_detect_file_api().with_raw_response + file_obj = request.file + file_name = getattr(file_obj, 'name', None) + file_extension = self._get_file_extension(file_name) if file_name else None + file_content = file_obj.read() + + base64_string = base64.b64encode(file_content).decode('utf-8') + + try: + if file_extension == 'txt': + req_file = DeidentifyTextRequestFile(base_64=base64_string, 
data_format="txt") + api_call = files_api.deidentify_text + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'transformations': self.__get_transformations(request), + 'request_options': self.__get_headers() + } + + elif file_extension in ['mp3', 'wav']: + req_file = DeidentifyAudioRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_audio + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'transformations': self.__get_transformations(request), + 'output_transcription': getattr(request, 'output_transcription', None), + 'output_processed_audio': getattr(request, 'output_processed_audio', None), + 'bleep_gain': getattr(request, 'bleep', None).gain if getattr(request, 'bleep', None) is not None else None, + 'bleep_frequency': getattr(request, 'bleep', None).frequency if getattr(request, 'bleep', None) is not None else None, + 'bleep_start_padding': getattr(request, 'bleep', None).start_padding if getattr(request, 'bleep', None) is not None else None, + 'bleep_stop_padding': getattr(request, 'bleep', None).stop_padding if getattr(request, 'bleep', None) is not None else None, + 'request_options': self.__get_headers() + } + + elif file_extension == 'pdf': + req_file = DeidentifyPdfRequestFile(base_64=base64_string) + api_call = files_api.deidentify_pdf + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'max_resolution': getattr(request, 'max_resolution', None), + 'density': getattr(request, 'pixel_density', None), + 'request_options': self.__get_headers() + } + + elif file_extension in ['jpeg', 'jpg', 'png', 'bmp', 'tif', 'tiff']: + req_file = DeidentifyImageRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_image + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'masking_method': getattr(request, 'masking_method', None), + 'output_ocr_text': getattr(request, 'output_ocr_text', None), + 'output_processed_image': getattr(request, 'output_processed_image', None), + 'request_options': self.__get_headers() + } + + elif file_extension in ['ppt', 'pptx']: + req_file = DeidentifyPresentationRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_presentation + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'request_options': self.__get_headers() + } + + elif file_extension in ['csv', 'xls', 'xlsx']: + req_file = DeidentifySpreadsheetRequestFile(base_64=base64_string, data_format=file_extension) + api_call = 
files_api.deidentify_spreadsheet + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'transformations': self.__get_transformations(request), + 'request_options': self.__get_headers() + } + + elif file_extension in ['doc', 'docx']: + req_file = DeidentifyDocumentRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_document + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'request_options': self.__get_headers() + } + + elif file_extension in ['json', 'xml']: + from skyflow.generated.rest.files.types.deidentify_structured_text_request_file import \ + DeidentifyStructuredTextRequestFile + req_file = DeidentifyStructuredTextRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_structured_text + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'transformations': self.__get_transformations(request), + 'request_options': self.__get_headers() + } + + else: + req_file = DeidentifyFileRequestFile(base_64=base64_string, data_format=file_extension) + api_call = files_api.deidentify_file + api_kwargs = { + 'vault_id': self.__vault_client.get_vault_id(), + 'file': req_file, + 'entity_types': request.entities, + 'token_type': self.__get_token_format(request), + 'allow_regex': request.allow_regex_list, + 'restrict_regex': request.restrict_regex_list, + 'transformations': self.__get_transformations(request), + 'request_options': self.__get_headers() + } + + log_info(SkyflowMessages.Info.DETECT_FILE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + api_response = api_call(**api_kwargs) + + run_id = getattr(api_response.data, 'run_id', None) + + processed_response = self.__poll_for_processed_file(run_id, request.wait_time) + parsed_response = self.__parse_deidentify_file_response(processed_response, run_id) + if request.output_directory and processed_response.status == 'SUCCESS': + file_name_only = 'processed-'+os.path.basename(file_name) + output_file_path = f"{request.output_directory}/{file_name_only}" + with open(output_file_path, 'wb') as output_file: + output_file.write(base64.b64decode(parsed_response.file)) + log_info(SkyflowMessages.Info.DETECT_FILE_SUCCESS.value, self.__vault_client.get_logger()) + return parsed_response + + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DETECT_FILE_REQUEST_REJECTED.value, + self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def get_detect_run(self, request: GetDetectRunRequest): + log_info(SkyflowMessages.Info.VALIDATING_GET_DETECT_RUN_INPUT.value, self.__vault_client.get_logger()) + validate_get_detect_run_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DEIDENTIFY_TEXT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + + files_api = self.__vault_client.get_detect_file_api().with_raw_response + run_id = 
request.run_id + try: + response = files_api.get_run( + run_id, + vault_id=self.__vault_client.get_vault_id(), + request_options=self.__get_headers() + ) + if response.data.status == 'IN_PROGRESS': + parsed_response = self.__parse_deidentify_file_response(DeidentifyFileResponse(run_id=run_id, status='IN_PROGRESS')) + else: + parsed_response = self.__parse_deidentify_file_response(response.data, run_id, response.data.status) + return parsed_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DETECT_FILE_REQUEST_REJECTED.value, + self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + diff --git a/skyflow/vault/detect/__init__.py b/skyflow/vault/detect/__init__.py new file mode 100644 index 00000000..e385a1f2 --- /dev/null +++ b/skyflow/vault/detect/__init__.py @@ -0,0 +1,13 @@ +from ._date_transformation import DateTransformation +from ._deidentify_text_request import DeidentifyTextRequest +from ._deidentify_text_response import DeidentifyTextResponse +from ._entity_info import EntityInfo +from ._reidentify_text_request import ReidentifyTextRequest +from ._reidentify_text_response import ReidentifyTextResponse +from ._text_index import TextIndex +from ._token_format import TokenFormat +from ._transformations import Transformations +from ._deidentify_file_request import DeidentifyFileRequest +from ._audio_bleep import Bleep +from ._deidentify_file_response import DeidentifyFileResponse +from ._get_detect_run_request import GetDetectRunRequest \ No newline at end of file diff --git a/skyflow/vault/detect/_audio_bleep.py b/skyflow/vault/detect/_audio_bleep.py new file mode 100644 index 00000000..745b7d46 --- /dev/null +++ b/skyflow/vault/detect/_audio_bleep.py @@ -0,0 +1,14 @@ +from typing import Optional + +class Bleep: + def __init__( + self, + gain: Optional[float] = None, + frequency: Optional[float] = None, + start_padding: Optional[float] = None, + stop_padding: Optional[float] = None + ): + self.gain = gain + self.frequency = frequency + self.start_padding = start_padding + self.stop_padding = stop_padding \ No newline at end of file diff --git a/skyflow/vault/detect/_date_transformation.py b/skyflow/vault/detect/_date_transformation.py new file mode 100644 index 00000000..c12199c2 --- /dev/null +++ b/skyflow/vault/detect/_date_transformation.py @@ -0,0 +1,8 @@ +from typing import List +from skyflow.utils.enums.detect_entities import DetectEntities + +class DateTransformation: + def __init__(self, max_days: int, min_days: int, entities: List[DetectEntities]): + self.max = max_days + self.min = min_days + self.entities = entities diff --git a/skyflow/vault/detect/_deidentify_file_request.py b/skyflow/vault/detect/_deidentify_file_request.py new file mode 100644 index 00000000..a429f5d5 --- /dev/null +++ b/skyflow/vault/detect/_deidentify_file_request.py @@ -0,0 +1,42 @@ +from typing import List, Optional, Union +from skyflow.utils.enums import DetectEntities +from skyflow.vault.detect import TokenFormat, Transformations +from skyflow.vault.detect._audio_bleep import Bleep +from skyflow.utils.enums import MaskingMethod, DetectOutputTranscriptions + +class DeidentifyFileRequest: + def __init__( + self, + file = None, + entities: Optional[List[DetectEntities]] = None, + allow_regex_list: Optional[List[str]] = None, + restrict_regex_list: Optional[List[str]] = None, + token_format: Optional[TokenFormat] = None, + transformations: Optional[Transformations] = None, + output_processed_image: Optional[bool] = None, + output_ocr_text: 
Optional[bool] = None,
+        masking_method: Optional[MaskingMethod] = None,
+        pixel_density: Optional[Union[int, float]] = None,
+        max_resolution: Optional[Union[int, float]] = None,
+        output_processed_audio: Optional[bool] = None,
+        output_transcription: Optional[DetectOutputTranscriptions] = None,
+        bleep: Optional[Bleep] = None,
+        output_directory: Optional[str] = None,
+        wait_time: Optional[Union[int, float]] = None
+    ):
+        self.file: object = file
+        self.entities: Optional[List[DetectEntities]] = entities
+        self.allow_regex_list: Optional[List[str]] = allow_regex_list
+        self.restrict_regex_list: Optional[List[str]] = restrict_regex_list
+        self.token_format: Optional[TokenFormat] = token_format
+        self.transformations: Optional[Transformations] = transformations
+        self.output_processed_image: Optional[bool] = output_processed_image
+        self.output_ocr_text: Optional[bool] = output_ocr_text
+        self.masking_method: Optional[MaskingMethod] = masking_method
+        self.pixel_density: Optional[Union[int, float]] = pixel_density
+        self.max_resolution: Optional[Union[int, float]] = max_resolution
+        self.output_processed_audio: Optional[bool] = output_processed_audio
+        self.output_transcription: Optional[DetectOutputTranscriptions] = output_transcription
+        self.bleep: Optional[Bleep] = bleep
+        self.output_directory: Optional[str] = output_directory
+        self.wait_time: Optional[Union[int, float]] = wait_time
\ No newline at end of file
diff --git a/skyflow/vault/detect/_deidentify_file_response.py b/skyflow/vault/detect/_deidentify_file_response.py
new file mode 100644
index 00000000..f386080d
--- /dev/null
+++ b/skyflow/vault/detect/_deidentify_file_response.py
@@ -0,0 +1,44 @@
+class DeidentifyFileResponse:
+    def __init__(
+        self,
+        file: str = None,
+        type: str = None,
+        extension: str = None,
+        word_count: int = None,
+        char_count: int = None,
+        size_in_kb: float = None,
+        duration_in_seconds: float = None,
+        page_count: int = None,
+        slide_count: int = None,
+        entities: list = None,  # list of dicts with keys 'file', 'type', and 'extension'
+        run_id: str = None,
+        status: str = None,
+        errors: list = None,
+    ):
+        self.file = file
+        self.type = type
+        self.extension = extension
+        self.word_count = word_count
+        self.char_count = char_count
+        self.size_in_kb = size_in_kb
+        self.duration_in_seconds = duration_in_seconds
+        self.page_count = page_count
+        self.slide_count = slide_count
+        self.entities = entities if entities is not None else []
+        self.run_id = run_id
+        self.status = status
+        self.errors = errors if errors is not None else []
+
+    def __repr__(self):
+        return (
+            f"DeidentifyFileResponse("
+            f"file={self.file!r}, type={self.type!r}, extension={self.extension!r}, "
+            f"word_count={self.word_count!r}, char_count={self.char_count!r}, "
+            f"size_in_kb={self.size_in_kb!r}, duration_in_seconds={self.duration_in_seconds!r}, "
+            f"page_count={self.page_count!r}, slide_count={self.slide_count!r}, "
+            f"entities={self.entities!r}, run_id={self.run_id!r}, status={self.status!r}, "
+            f"errors={self.errors!r})"
+        )
+
+    def __str__(self):
+        return self.__repr__()
\ No newline at end of file
diff --git a/skyflow/vault/detect/_deidentify_text_request.py b/skyflow/vault/detect/_deidentify_text_request.py
new file mode 100644
index 00000000..8b4a440d
--- /dev/null
+++ b/skyflow/vault/detect/_deidentify_text_request.py
@@ -0,0 +1,19 @@
+from typing import List, Optional
+from skyflow.utils.enums.detect_entities import DetectEntities
+from ._token_format import TokenFormat
+from ._transformations import Transformations
+
+class DeidentifyTextRequest:
+    def
__init__(self, + text: str, + entities: Optional[List[DetectEntities]] = None, + allow_regex_list: Optional[List[str]] = None, + restrict_regex_list: Optional[List[str]] = None, + token_format: Optional[TokenFormat] = None, + transformations: Optional[Transformations] = None): + self.text = text + self.entities = entities + self.allow_regex_list = allow_regex_list + self.restrict_regex_list = restrict_regex_list + self.token_format = token_format + self.transformations = transformations diff --git a/skyflow/vault/detect/_deidentify_text_response.py b/skyflow/vault/detect/_deidentify_text_response.py new file mode 100644 index 00000000..cdb6632e --- /dev/null +++ b/skyflow/vault/detect/_deidentify_text_response.py @@ -0,0 +1,19 @@ +from typing import List +from ._entity_info import EntityInfo + +class DeidentifyTextResponse: + def __init__(self, + processed_text: str, + entities: List[EntityInfo], + word_count: int, + char_count: int): + self.processed_text = processed_text + self.entities = entities + self.word_count = word_count + self.char_count = char_count + + def __repr__(self): + return f"DeidentifyTextResponse(processed_text='{self.processed_text}', entities={self.entities}, word_count={self.word_count}, char_count={self.char_count})" + + def __str__(self): + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/detect/_entity_info.py b/skyflow/vault/detect/_entity_info.py new file mode 100644 index 00000000..1eb5f132 --- /dev/null +++ b/skyflow/vault/detect/_entity_info.py @@ -0,0 +1,20 @@ +from typing import Dict +from ._text_index import TextIndex + +class EntityInfo: + def __init__(self, token: str, value: str, text_index: TextIndex, + processed_index: TextIndex, entity: str, scores: Dict[str, float]): + self.token = token + self.value = value + self.text_index = text_index + self.processed_index = processed_index + self.entity = entity + self.scores = scores + + def __repr__(self) -> str: + return (f"EntityInfo(token='{self.token}', value='{self.value}', " + f"text_index={self.text_index}, processed_index={self.processed_index}, " + f"entity='{self.entity}', scores={self.scores})") + + def __str__(self) -> str: + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/detect/_get_detect_run_request.py b/skyflow/vault/detect/_get_detect_run_request.py new file mode 100644 index 00000000..14426458 --- /dev/null +++ b/skyflow/vault/detect/_get_detect_run_request.py @@ -0,0 +1,6 @@ +class GetDetectRunRequest: + def __init__( + self, + run_id: str, + ): + self.run_id: str = run_id \ No newline at end of file diff --git a/skyflow/vault/detect/_reidentify_text_request.py b/skyflow/vault/detect/_reidentify_text_request.py new file mode 100644 index 00000000..7a7a744d --- /dev/null +++ b/skyflow/vault/detect/_reidentify_text_request.py @@ -0,0 +1,12 @@ +from typing import List, Optional +from skyflow.utils.enums.detect_entities import DetectEntities + +class ReidentifyTextRequest: + def __init__(self, text: str, + redacted_entities: Optional[List[DetectEntities]] = None, + masked_entities: Optional[List[DetectEntities]] = None, + plain_text_entities: Optional[List[DetectEntities]] = None): + self.text = text + self.redacted_entities = redacted_entities + self.masked_entities = masked_entities + self.plain_text_entities = plain_text_entities diff --git a/skyflow/vault/detect/_reidentify_text_response.py b/skyflow/vault/detect/_reidentify_text_response.py new file mode 100644 index 00000000..50c3876d --- /dev/null +++ 
b/skyflow/vault/detect/_reidentify_text_response.py @@ -0,0 +1,9 @@ +class ReidentifyTextResponse: + def __init__(self, processed_text: str): + self.processed_text = processed_text + + def __repr__(self) -> str: + return f"ReidentifyTextResponse(processed_text='{self.processed_text}')" + + def __str__(self) -> str: + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/detect/_text_index.py b/skyflow/vault/detect/_text_index.py new file mode 100644 index 00000000..add71c53 --- /dev/null +++ b/skyflow/vault/detect/_text_index.py @@ -0,0 +1,10 @@ +class TextIndex: + def __init__(self, start: int, end: int): + self.start = start + self.end = end + + def __repr__(self): + return f"TextIndex(start={self.start}, end={self.end})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/detect/_token_format.py b/skyflow/vault/detect/_token_format.py new file mode 100644 index 00000000..a8d2b2db --- /dev/null +++ b/skyflow/vault/detect/_token_format.py @@ -0,0 +1,13 @@ +from typing import List +from skyflow.utils.enums.detect_entities import DetectEntities +from skyflow.utils.enums.token_type import TokenType + +class TokenFormat: + def __init__(self, default: TokenType = TokenType.ENTITY_UNIQUE_COUNTER, + vault_token: List[DetectEntities] = None, + entity_unique_counter: List[DetectEntities] = None, + entity_only: List[DetectEntities] = None): + self.default = default + self.vault_token = vault_token + self.entity_unique_counter = entity_unique_counter + self.entity_only = entity_only diff --git a/skyflow/vault/detect/_transformations.py b/skyflow/vault/detect/_transformations.py new file mode 100644 index 00000000..af6bff56 --- /dev/null +++ b/skyflow/vault/detect/_transformations.py @@ -0,0 +1,5 @@ +from skyflow.vault.detect._date_transformation import DateTransformation + +class Transformations: + def __init__(self, shift_dates: DateTransformation): + self.shift_dates = shift_dates diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index a7306e7b..6324d9a7 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -7,11 +7,13 @@ from requests import PreparedRequest from requests.models import HTTPError from skyflow.error import SkyflowError +from skyflow.generated.rest import ErrorResponse from skyflow.utils import get_credentials, SkyflowMessages, get_vault_url, construct_invoke_connection_request, \ parse_insert_response, parse_update_record_response, parse_delete_response, parse_get_response, \ parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_invoke_connection_response, \ - handle_exception, validate_api_key, encode_column_values -from skyflow.utils._utils import parse_path_params, to_lowercase_keys, get_metrics + handle_exception, validate_api_key, encode_column_values, parse_deidentify_text_response, \ + parse_reidentify_text_response, convert_detected_entity_to_entity_info +from skyflow.utils._utils import parse_path_params, to_lowercase_keys, get_metrics, handle_json_error from skyflow.utils.enums import EnvUrls, Env, ContentType from skyflow.vault.connection import InvokeConnectionResponse from skyflow.vault.data import InsertResponse, DeleteResponse, GetResponse, QueryResponse @@ -66,6 +68,65 @@ def test_get_vault_url_with_invalid_env(self): url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_ENV.value.format(valid_vault_id)) + @patch("skyflow.utils._utils.log_and_reject_error") + def 
test_handle_json_error_with_dict_data(self, mock_log_and_reject_error): + """Test handling JSON error when data is already a dict.""" + error_dict = { + "error": { + "message": "Dict error message", + "http_code": 400, + "http_status": "Bad Request", + "grpc_code": 3, + "details": ["detail1"] + } + } + + mock_error = Mock() + mock_logger = Mock() + request_id = "test-request-id" + + handle_json_error(mock_error, error_dict, request_id, mock_logger) + + mock_log_and_reject_error.assert_called_once_with( + "Dict error message", + 400, + request_id, + "Bad Request", + 3, + ["detail1"], + logger=mock_logger + ) + + @patch("skyflow.utils._utils.log_and_reject_error") + def test_handle_json_error_with_error_response_object(self, mock_log_and_reject_error): + """Test handling JSON error when data is an ErrorResponse object.""" + mock_error_response = Mock(spec=ErrorResponse) + mock_error_response.dict.return_value = { + "error": { + "message": "ErrorResponse message", + "http_code": 403, + "http_status": "Forbidden", + "grpc_code": 7, + "details": ["detail2"] + } + } + + mock_error = Mock() + mock_logger = Mock() + request_id = "test-request-id-2" + + handle_json_error(mock_error, mock_error_response, request_id, mock_logger) + + mock_log_and_reject_error.assert_called_once_with( + "ErrorResponse message", + 403, + request_id, + "Forbidden", + 7, + ["detail2"], + logger=mock_logger + ) + def test_parse_path_params(self): url = "https://example.com/{param1}/{param2}" path_params = {"param1": "value1", "param2": "value2"} @@ -418,3 +479,114 @@ def test_encode_column_values(self): result = encode_column_values(get_request) self.assertEqual(result, expected_encoded_values) + + def test_parse_deidentify_text_response(self): + """Test parsing deidentify text response with multiple entities.""" + mock_entity = Mock() + mock_entity.token = "token123" + mock_entity.value = "sensitive_value" + mock_entity.entity_type = "EMAIL" + mock_entity.entity_scores = {"EMAIL": 0.95} + mock_entity.location = Mock( + start_index=10, + end_index=20, + start_index_processed=15, + end_index_processed=25 + ) + + mock_api_response = Mock() + mock_api_response.processed_text = "Sample processed text" + mock_api_response.entities = [mock_entity] + mock_api_response.word_count = 3 + mock_api_response.character_count = 20 + + result = parse_deidentify_text_response(mock_api_response) + + self.assertEqual(result.processed_text, "Sample processed text") + self.assertEqual(result.word_count, 3) + self.assertEqual(result.char_count, 20) + self.assertEqual(len(result.entities), 1) + + entity = result.entities[0] + self.assertEqual(entity.token, "token123") + self.assertEqual(entity.value, "sensitive_value") + self.assertEqual(entity.entity, "EMAIL") + self.assertEqual(entity.scores, {"EMAIL": 0.95}) + self.assertEqual(entity.text_index.start, 10) + self.assertEqual(entity.text_index.end, 20) + self.assertEqual(entity.processed_index.start, 15) + self.assertEqual(entity.processed_index.end, 25) + + def test_parse_deidentify_text_response_no_entities(self): + """Test parsing deidentify text response with no entities.""" + mock_api_response = Mock() + mock_api_response.processed_text = "Sample processed text" + mock_api_response.entities = [] + mock_api_response.word_count = 3 + mock_api_response.character_count = 20 + + result = parse_deidentify_text_response(mock_api_response) + + self.assertEqual(result.processed_text, "Sample processed text") + self.assertEqual(result.word_count, 3) + self.assertEqual(result.char_count, 20) + 
self.assertEqual(len(result.entities), 0) + + def test_parse_reidentify_text_response(self): + """Test parsing reidentify text response.""" + mock_api_response = Mock() + mock_api_response.text = "Reidentified text with actual values" + + result = parse_reidentify_text_response(mock_api_response) + + self.assertEqual(result.processed_text, "Reidentified text with actual values") + + def test__convert_detected_entity_to_entity_info(self): + """Test converting detected entity to EntityInfo object.""" + mock_detected_entity = Mock() + mock_detected_entity.token = "token123" + mock_detected_entity.value = "sensitive_value" + mock_detected_entity.entity_type = "EMAIL" + mock_detected_entity.entity_scores = {"EMAIL": 0.95} + mock_detected_entity.location = Mock( + start_index=10, + end_index=20, + start_index_processed=15, + end_index_processed=25 + ) + + result = convert_detected_entity_to_entity_info(mock_detected_entity) + + self.assertEqual(result.token, "token123") + self.assertEqual(result.value, "sensitive_value") + self.assertEqual(result.entity, "EMAIL") + self.assertEqual(result.scores, {"EMAIL": 0.95}) + self.assertEqual(result.text_index.start, 10) + self.assertEqual(result.text_index.end, 20) + self.assertEqual(result.processed_index.start, 15) + self.assertEqual(result.processed_index.end, 25) + + def test__convert_detected_entity_to_entity_info_with_minimal_data(self): + """Test converting detected entity with minimal required data.""" + mock_detected_entity = Mock() + mock_detected_entity.token = "token123" + mock_detected_entity.value = None + mock_detected_entity.entity_type = "UNKNOWN" + mock_detected_entity.entity_scores = {} + mock_detected_entity.location = Mock( + start_index=0, + end_index=0, + start_index_processed=0, + end_index_processed=0 + ) + + result = convert_detected_entity_to_entity_info(mock_detected_entity) + + self.assertEqual(result.token, "token123") + self.assertIsNone(result.value) + self.assertEqual(result.entity, "UNKNOWN") + self.assertEqual(result.scores, {}) + self.assertEqual(result.text_index.start, 0) + self.assertEqual(result.text_index.end, 0) + self.assertEqual(result.processed_index.start, 0) + self.assertEqual(result.processed_index.end, 0) diff --git a/tests/vault/controller/test__detect.py b/tests/vault/controller/test__detect.py new file mode 100644 index 00000000..29db32dc --- /dev/null +++ b/tests/vault/controller/test__detect.py @@ -0,0 +1,579 @@ +import unittest +from unittest.mock import Mock, patch, MagicMock +import base64 +import os +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.vault.controller import Detect +from skyflow.vault.detect import DeidentifyTextRequest, ReidentifyTextRequest, \ + TokenFormat, DateTransformation, Transformations, DeidentifyFileRequest, GetDetectRunRequest, DeidentifyFileResponse +from skyflow.utils.enums import DetectEntities, TokenType + +VAULT_ID = "test_vault_id" + +class TestDetect(unittest.TestCase): + def setUp(self): + # Mock vault client + self.vault_client = Mock() + self.vault_client.get_vault_id.return_value = VAULT_ID + self.vault_client.get_logger.return_value = Mock() + + # Create a Detect instance with the mock client + self.detect = Detect(self.vault_client) + + @patch("skyflow.vault.controller._detect.validate_deidentify_text_request") + @patch("skyflow.vault.controller._detect.parse_deidentify_text_response") + def test_deidentify_text_success(self, mock_parse_response, mock_validate): + # Mock API response + mock_api_response = Mock() + 
mock_api_response.data = { + 'text': '[TOKEN_1] lives in [TOKEN_2]', + 'entities': [ + { + 'token': 'Token1', + 'value': 'John', + 'text_index': {'start': 0, 'end': 4}, + 'processed_index': {'start': 0, 'end': 8}, + 'entity': 'NAME', + 'scores': {'confidence': 0.9} + } + ], + 'word_count': 4, + 'char_count': 20 + } + + # Create request + request = DeidentifyTextRequest( + text="John lives in NYC", + entities=[DetectEntities.NAME], + token_format=TokenFormat(default=TokenType.ENTITY_ONLY) + ) + + # Mock detect API + detect_api = self.vault_client.get_detect_text_api.return_value + detect_api.deidentify_string.return_value = mock_api_response + + # Call deidentify_text + response = self.detect.deidentify_text(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + mock_parse_response.assert_called_once_with(mock_api_response) + detect_api.deidentify_string.assert_called_once() + + @patch("skyflow.vault.controller._detect.validate_reidentify_text_request") + @patch("skyflow.vault.controller._detect.parse_reidentify_text_response") + def test_reidentify_text_success(self, mock_parse_response, mock_validate): + # Mock API response + mock_api_response = Mock() + mock_api_response.data = { + 'text': 'John lives in NYC' + } + + # Create request + request = ReidentifyTextRequest( + text="Token1 lives in Token2", + redacted_entities=[DetectEntities.NAME] + ) + + # Mock detect API + detect_api = self.vault_client.get_detect_text_api.return_value + detect_api.reidentify_string.return_value = mock_api_response + + # Call reidentify_text + response = self.detect.reidentify_text(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + mock_parse_response.assert_called_once_with(mock_api_response) + detect_api.reidentify_string.assert_called_once() + + @patch("skyflow.vault.controller._detect.validate_deidentify_text_request") + def test_deidentify_text_handles_generic_error(self, mock_validate): + request = DeidentifyTextRequest( + text="John lives in NYC", + entities=[DetectEntities.NAME] + ) + detect_api = self.vault_client.get_detect_text_api.return_value + detect_api.deidentify_string.side_effect = Exception("Generic Error") + + with self.assertRaises(Exception): + self.detect.deidentify_text(request) + + detect_api.deidentify_string.assert_called_once() + + @patch("skyflow.vault.controller._detect.validate_reidentify_text_request") + def test_reidentify_text_handles_generic_error(self, mock_validate): + request = ReidentifyTextRequest( + text="Token1 lives in Token2", + redacted_entities=[DetectEntities.NAME] + ) + detect_api = self.vault_client.get_detect_text_api.return_value + detect_api.reidentify_string.side_effect = Exception("Generic Error") + + with self.assertRaises(Exception): + self.detect.reidentify_text(request) + + detect_api.reidentify_string.assert_called_once() + + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + @patch("skyflow.vault.controller._detect.base64") + @patch("skyflow.vault.controller._detect.os.path.basename") + @patch("skyflow.vault.controller._detect.open", create=True) + def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64, mock_validate): + file_content = b"test content" + file_obj = Mock() + file_obj.read.return_value = file_content + file_obj.name = "/tmp/test.txt" + mock_basename.return_value = "test.txt" + mock_base64.b64encode.return_value = b"dGVzdCBjb250ZW50" + req = 
DeidentifyFileRequest(file=file_obj) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = "/tmp" + files_api = Mock() + files_api.with_raw_response = files_api + files_api.deidentify_text = Mock() + self.vault_client.get_detect_file_api.return_value = files_api + api_response = Mock() + api_response.data = Mock(run_id="runid123") + files_api.deidentify_text.return_value = api_response + + processed_response = Mock() + processed_response.status = "SUCCESS" + processed_response.output = [] + processed_response.word_character_count = Mock(word_count=1, character_count=1) + with patch.object(self.detect, "_Detect__poll_for_processed_file", + return_value=processed_response) as mock_poll, \ + patch.object(self.detect, "_Detect__parse_deidentify_file_response", + return_value=DeidentifyFileResponse(file="dGVzdCBjb250ZW50", type="txt", extension="txt", + word_count=1, char_count=1, size_in_kb=1, + duration_in_seconds=None, page_count=None, + slide_count=None, entities=[], run_id="runid123", + status="SUCCESS", errors=[])) as mock_parse: + result = self.detect.deidentify_file(req) + mock_validate.assert_called_once() + files_api.deidentify_text.assert_called_once() + mock_poll.assert_called_once() + mock_parse.assert_called_once() + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + @patch("skyflow.vault.controller._detect.base64") + def test_deidentify_file_audio_success(self, mock_base64, mock_validate): + file_content = b"audio bytes" + file_obj = Mock() + file_obj.read.return_value = file_content + file_obj.name = "audio.mp3" + mock_base64.b64encode.return_value = b"YXVkaW8gYnl0ZXM=" + req = DeidentifyFileRequest(file=file_obj) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = None + files_api = Mock() + files_api.with_raw_response = files_api + files_api.deidentify_audio = Mock() + self.vault_client.get_detect_file_api.return_value = files_api + api_response = Mock() + api_response.data = Mock(run_id="runid456") + files_api.deidentify_audio.return_value = api_response + + processed_response = Mock() + processed_response.status = "SUCCESS" + processed_response.output = [] + processed_response.word_character_count = Mock(word_count=1, character_count=1) + with patch.object(self.detect, "_Detect__poll_for_processed_file", + return_value=processed_response) as mock_poll, \ + patch.object(self.detect, "_Detect__parse_deidentify_file_response", + return_value=DeidentifyFileResponse(file="YXVkaW8gYnl0ZXM=", type="mp3", extension="mp3", + word_count=1, char_count=1, size_in_kb=1, + duration_in_seconds=1, page_count=None, + slide_count=None, entities=[], run_id="runid456", + status="SUCCESS", errors=[])) as mock_parse: + result = self.detect.deidentify_file(req) + mock_validate.assert_called_once() + files_api.deidentify_audio.assert_called_once() + mock_poll.assert_called_once() + mock_parse.assert_called_once() + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + def test_deidentify_file_exception(self, 
mock_validate): + req = DeidentifyFileRequest(file=Mock()) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = None + files_api = Mock() + files_api.with_raw_response = files_api + files_api.deidentify_text.side_effect = Exception("API Error") + self.vault_client.get_detect_file_api.return_value = files_api + with self.assertRaises(Exception): + self.detect.deidentify_file(req) + + @patch("skyflow.vault.controller._detect.validate_get_detect_run_request") + def test_get_detect_run_success(self, mock_validate): + req = GetDetectRunRequest(run_id="runid789") + files_api = Mock() + files_api.with_raw_response = files_api + files_api.get_run = Mock() + self.vault_client.get_detect_file_api.return_value = files_api + response = Mock() + response.status = "SUCCESS" + response.output = [] + response.word_character_count = Mock(word_count=1, character_count=1) + files_api.get_run.return_value = response + with patch.object(self.detect, "_Detect__parse_deidentify_file_response", + return_value=DeidentifyFileResponse(file="file", type="txt", extension="txt", word_count=1, + char_count=1, size_in_kb=1, duration_in_seconds=None, + page_count=None, slide_count=None, entities=[], + run_id="runid789", status="SUCCESS", + errors=[])) as mock_parse: + result = self.detect.get_detect_run(req) + mock_validate.assert_called_once() + files_api.get_run.assert_called_once() + mock_parse.assert_called_once() + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + + @patch("skyflow.vault.controller._detect.validate_get_detect_run_request") + def test_get_detect_run_exception(self, mock_validate): + req = GetDetectRunRequest(run_id="runid789") + files_api = Mock() + files_api.with_raw_response = files_api + files_api.get_run.side_effect = Exception("API Error") + self.vault_client.get_detect_file_api.return_value = files_api + with self.assertRaises(Exception): + self.detect.get_detect_run(req) + + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + @patch("skyflow.vault.controller._detect.base64") + @patch("skyflow.vault.controller._detect.os.path.basename") + @patch("skyflow.vault.controller._detect.open", create=True) + @patch.object(Detect, "_Detect__poll_for_processed_file") + def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, mock_base64, mock_validate): + """Test all file type branches with optimized mocking""" + + # Common mocks + file_content = b"test content" + mock_base64.b64encode.return_value = b"dGVzdCBjb250ZW50" + + # Prepare a generic processed_response for all branches + processed_response = Mock() + processed_response.status = "SUCCESS" + processed_response.output = [ + {"processedFile": "dGVzdCBjb250ZW50", "processedFileType": "pdf", "processedFileExtension": "pdf"} + ] + processed_response.word_character_count = Mock(word_count=1, character_count=1) + processed_response.size = 1 + processed_response.duration = 1 + processed_response.pages = 1 + processed_response.slides = 1 + processed_response.message = "" + processed_response.run_id = "runid123" + mock_poll.return_value = processed_response + + # Patch __parse_deidentify_file_response to return a valid DeidentifyFileResponse + with patch.object(self.detect, "_Detect__parse_deidentify_file_response", + return_value=DeidentifyFileResponse( + file="dGVzdCBjb250ZW50", type="pdf", 
extension="pdf", + word_count=1, char_count=1, size_in_kb=1, + duration_in_seconds=1, page_count=1, slide_count=1, + entities=[], run_id="runid123", status="SUCCESS", errors=[] + )) as mock_parse: + # Test configuration for different file types + test_cases = [ + ("test.pdf", "pdf", "deidentify_pdf"), + ("test.jpg", "jpg", "deidentify_image"), + ("test.pptx", "pptx", "deidentify_presentation"), + ("test.csv", "csv", "deidentify_spreadsheet"), + ("test.docx", "docx", "deidentify_document"), + ("test.json", "json", "deidentify_structured_text"), + ("test.xml", "xml", "deidentify_structured_text"), + ("test.unknown", "unknown", "deidentify_file") + ] + + for file_name, extension, api_method in test_cases: + with self.subTest(file_type=extension): + # Setup file mock + file_obj = Mock() + file_obj.read.return_value = file_content + file_obj.name = file_name + mock_basename.return_value = file_name + + # Setup request + req = DeidentifyFileRequest(file=file_obj) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = "/tmp" + + # Setup API mock + files_api = Mock() + files_api.with_raw_response = files_api + api_method_mock = Mock() + setattr(files_api, api_method, api_method_mock) + self.vault_client.get_detect_file_api.return_value = files_api + + # Setup API response + api_response = Mock() + api_response.data = Mock(run_id="runid123") + api_method_mock.return_value = api_response + + # Actually run the method + result = self.detect.deidentify_file(req) + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + self.assertEqual(result.file, "dGVzdCBjb250ZW50") + self.assertEqual(result.type, "pdf") + self.assertEqual(result.extension, "pdf") + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + @patch("skyflow.vault.controller._detect.base64") + def test_deidentify_file_exception(self, mock_base64, mock_validate): + file_obj = Mock() + file_obj.read.side_effect = Exception("Read error") + file_obj.name = "test.txt" + req = DeidentifyFileRequest(file=file_obj) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = None + with self.assertRaises(Exception): + self.detect.deidentify_file(req) + + @patch("skyflow.vault.controller._detect.time.sleep", return_value=None) + def test_poll_for_processed_file_success(self, mock_sleep): + files_api = Mock() + files_api.with_raw_response = files_api + self.vault_client.get_detect_file_api.return_value = files_api + + call_count = {"count": 0} + + def get_run_side_effect(*args, **kwargs): + if call_count["count"] < 1: + call_count["count"] += 1 + in_progress = Mock() + in_progress.status = "IN_PROGRESS" + in_progress.message = "" + return Mock(data=in_progress) + else: + success = Mock() + success.status = "SUCCESS" + return Mock(data=success) + + files_api.get_run.side_effect = get_run_side_effect + + # Use max_wait_time > 1 to allow the loop to reach the SUCCESS status + result = self.detect._Detect__poll_for_processed_file("runid123", max_wait_time=2) + self.assertEqual(result.status, "SUCCESS") + + @patch("skyflow.vault.controller._detect.time.sleep", return_value=None) + def test_poll_for_processed_file_failed(self, mock_sleep): + files_api = Mock() + 
files_api.with_raw_response = files_api + self.vault_client.get_detect_file_api.return_value = files_api + + # Always return FAILED on first call + def get_run_side_effect(*args, **kwargs): + failed = Mock() + failed.status = "FAILED" + failed.message = "fail" + return Mock(data=failed) + + files_api.get_run.side_effect = get_run_side_effect + + result = self.detect._Detect__poll_for_processed_file("runid123", max_wait_time=1) + self.assertEqual(result.status, "FAILED") + self.assertEqual(result.message, "fail") + + def test_parse_deidentify_file_response_dict_and_obj(self): + # Dict input + data = { + "output": [ + {"processedFile": "abc", "processedFileType": "pdf", "processedFileExtension": "pdf"}, + {"processedFile": "def", "processedFileType": "entities", "processedFileExtension": "json"} + ], + "word_character_count": {"word_count": 5, "character_count": 10}, + "size": 1, + "duration": 2, + "pages": 3, + "slides": 4, + "run_id": "runid", + "status": "SUCCESS" + } + result = self.detect._Detect__parse_deidentify_file_response(data, "runid", "SUCCESS") + self.assertIsInstance(result, DeidentifyFileResponse) + + # Object input + class DummyWordChar: + word_count = 7 + character_count = 14 + + class DummyData: + output = [ + type("O", (), + {"processed_file": "abc", "processed_file_type": "pdf", "processed_file_extension": "pdf"})(), + type("O", (), + {"processed_file": "def", "processed_file_type": "entities", "processed_file_extension": "json"})() + ] + word_character_count = DummyWordChar() + size = 1 + duration = 2 + pages = 3 + slides = 4 + run_id = "runid" + status = "SUCCESS" + + obj_data = DummyData() + result = self.detect._Detect__parse_deidentify_file_response(obj_data, "runid", "SUCCESS") + self.assertIsInstance(result, DeidentifyFileResponse) + + def test_get_token_format_missing_attribute(self): + """Test __get_token_format when token_format attribute is missing""" + class DummyRequest: + pass + request = DummyRequest() + result = self.detect._Detect__get_token_format(request) + self.assertIsNone(result) + + def test_get_transformations_missing_shift_dates(self): + """Test __get_transformations when shift_dates is None""" + class DummyTransformations: + shift_dates = None + class DummyRequest: + transformations = DummyTransformations() + request = DummyRequest() + result = self.detect._Detect__get_transformations(request) + self.assertIsNone(result) + + @patch("skyflow.vault.controller._detect.validate_get_detect_run_request") + def test_get_detect_run_in_progress_status(self, mock_validate): + """Test get_detect_run when status is IN_PROGRESS""" + # Setup + run_id = "test_run_id" + req = GetDetectRunRequest(run_id=run_id) + + # Mock API response + files_api = Mock() + files_api.with_raw_response = files_api + mock_response = Mock() + mock_response.data = Mock() + mock_response.data.status = 'IN_PROGRESS' + files_api.get_run.return_value = mock_response + + self.vault_client.get_detect_file_api.return_value = files_api + + # Execute + with patch.object(self.detect, "_Detect__parse_deidentify_file_response") as mock_parse: + result = self.detect.get_detect_run(req) + + # Verify IN_PROGRESS handling + mock_parse.assert_called_once() + args = mock_parse.call_args[0][0] + self.assertIsInstance(args, DeidentifyFileResponse) + self.assertEqual(args.status, 'IN_PROGRESS') + self.assertEqual(args.run_id, run_id) + + def test_get_transformations_with_shift_dates(self): + + class DummyShiftDates: + max = 30 + min = 10 + entities = ["SSN"] + + class DummyTransformations: + shift_dates 
= DummyShiftDates() + + class DummyRequest: + transformations = DummyTransformations() + + request = DummyRequest() + result = self.detect._Detect__get_transformations(request) + + self.assertEqual(result, { + 'shift_dates': { + 'max_days': 30, + 'min_days': 10, + 'entity_types': ["SSN"] + } + }) + + @patch("skyflow.vault.controller._detect.time.sleep", return_value=None) + def test_poll_for_processed_file_timeout(self, mock_sleep): + """Test polling timeout returns IN_PROGRESS status""" + files_api = Mock() + files_api.with_raw_response = files_api + self.vault_client.get_detect_file_api.return_value = files_api + + # Always return IN_PROGRESS + def get_run_side_effect(*args, **kwargs): + in_progress = Mock() + in_progress.status = "IN_PROGRESS" + return Mock(data=in_progress) + + files_api.get_run.side_effect = get_run_side_effect + + result = self.detect._Detect__poll_for_processed_file("runid123", max_wait_time=1) + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "IN_PROGRESS") + self.assertEqual(result.run_id, "runid123") + + @patch("skyflow.vault.controller._detect.time.sleep", return_value=None) + def test_poll_for_processed_file_wait_time_calculation(self, mock_sleep): + """Test wait time calculation in polling loop""" + files_api = Mock() + files_api.with_raw_response = files_api + self.vault_client.get_detect_file_api.return_value = files_api + + calls = [] + + def track_sleep(*args): + calls.append(args[0]) # Record wait time + + mock_sleep.side_effect = track_sleep + + # Return IN_PROGRESS twice then SUCCESS + responses = [ + Mock(data=Mock(status="IN_PROGRESS")), + Mock(data=Mock(status="IN_PROGRESS")), + Mock(data=Mock(status="SUCCESS")) + ] + files_api.get_run.side_effect = responses + + result = self.detect._Detect__poll_for_processed_file("runid123", max_wait_time=4) + + self.assertEqual(calls, [2, 2]) + self.assertEqual(result.status, "SUCCESS") + + + def test_parse_deidentify_file_response_output_conversion(self): + """Test output conversion in parse_deidentify_file_response""" + + class OutputObj: + processed_file = "file123" + processed_file_type = "pdf" + processed_file_extension = "pdf" + + data = Mock() + data.output = [OutputObj()] + data.word_character_count = Mock(word_count=1, character_count=1) + + result = self.detect._Detect__parse_deidentify_file_response(data) + + self.assertEqual(result.file, "file123") + self.assertEqual(result.type, "pdf") + self.assertEqual(result.extension, "pdf") \ No newline at end of file From e3279f0579c599ca402ee133082ff4430f35f00a Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Tue, 8 Jul 2025 19:38:49 +0530 Subject: [PATCH 37/60] SK-2142 return None in case of empty error for Data APIs and include file path in deidentify file request (#188) --- setup.py | 2 +- skyflow/utils/_skyflow_messages.py | 3 +- skyflow/utils/_utils.py | 18 +- skyflow/utils/_version.py | 2 +- skyflow/utils/validations/_validations.py | 34 +++ .../connection/_invoke_connection_response.py | 5 +- skyflow/vault/controller/_detect.py | 36 ++- skyflow/vault/data/_insert_response.py | 2 - skyflow/vault/data/_query_response.py | 2 +- skyflow/vault/data/_update_response.py | 2 +- skyflow/vault/detect/__init__.py | 3 +- .../vault/detect/_deidentify_file_request.py | 3 +- .../vault/detect/_deidentify_file_response.py | 23 +- skyflow/vault/detect/_file.py | 53 ++++ skyflow/vault/detect/_file_input.py | 19 ++ tests/utils/test__utils.py | 13 +- tests/vault/controller/test__connection.py | 3 +- 
tests/vault/controller/test__detect.py | 282 +++++++++++++----- tests/vault/controller/test__vault.py | 24 +- 19 files changed, 401 insertions(+), 128 deletions(-) create mode 100644 skyflow/vault/detect/_file.py create mode 100644 skyflow/vault/detect/_file_input.py diff --git a/setup.py b/setup.py index 8c09ec2e..a92f3c8d 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b6' +current_version = '2.0.0b7' setup( name='skyflow', diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index a5b94451..460ca29e 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -169,6 +169,7 @@ class Error(Enum): INVALID_PLAIN_TEXT_ENTITIES_IN_REIDENTIFY= f"{error_prefix} Validation error. The plainTextEntities field must be an array of DetectEntities enums. Specify a valid plainTextEntities." INVALID_DEIDENTIFY_FILE_REQUEST= f"{error_prefix} Validation error. Invalid deidentify file request. Specify a valid deidentify file request." + INVALID_DEIDENTIFY_FILE_INPUT= f"{error_prefix} Validation error. Invalid deidentify file input. Please provide either a file or a file path." EMPTY_FILE_OBJECT= f"{error_prefix} Validation error. File object cannot be empty. Specify a valid file object." INVALID_FILE_FORMAT= f"{error_prefix} Validation error. Invalid file format. Specify a valid file format." MISSING_FILE_SOURCE= f"{error_prefix} Validation error. Provide exactly one of filePath, base64, or fileObject." @@ -197,7 +198,7 @@ class Error(Enum): INVALID_FILE_OR_ENCODED_FILE= f"{error_prefix} . Error while decoding base64 and saving file" INVALID_FILE_TYPE = f"{error_prefix} Validation error. Invalid file type. Specify a valid file type." INVALID_FILE_NAME= f"{error_prefix} Validation error. Invalid file name. Specify a valid file name." - FILE_READ_ERROR= f"{error_prefix} Validation error. Unable to read file. Verify the file path." + INVALID_DEIDENTIFY_FILE_PATH= f"{error_prefix} Validation error. Invalid file path. Specify a valid file path." INVALID_BASE64_HEADER= f"{error_prefix} Validation error. Invalid base64 header. Specify a valid base64 header." INVALID_WAIT_TIME= f"{error_prefix} Validation error. Invalid wait time. Specify a valid wait time as number and should not be greater than 64 secs." INVALID_OUTPUT_DIRECTORY= f"{error_prefix} Validation error. Invalid output directory. Specify a valid output directory as string." 
diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 6b013a85..77ffe580 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -211,7 +211,6 @@ def get_metrics(): } return details_dic - def parse_insert_response(api_response, continue_on_error): # Retrieve the headers and data from the API response api_response_headers = api_response.headers @@ -239,13 +238,13 @@ def parse_insert_response(api_response, continue_on_error): error = { 'request_index': idx, 'request_id': request_id, - 'error': response['Body']['error'] + 'error': response['Body']['error'], + 'http_code': response['Status'], } errors.append(error) insert_response.inserted_fields = inserted_fields - insert_response.errors = errors - + insert_response.errors = errors if len(errors) > 0 else None else: for record in api_response_data.records: field_data = { @@ -257,6 +256,7 @@ def parse_insert_response(api_response, continue_on_error): inserted_fields.append(field_data) insert_response.inserted_fields = inserted_fields + insert_response.errors = None return insert_response @@ -275,21 +275,17 @@ def parse_delete_response(api_response: V1BulkDeleteRecordResponse): delete_response = DeleteResponse() deleted_ids = api_response.record_id_response delete_response.deleted_ids = deleted_ids - delete_response.errors = [] + delete_response.errors = None return delete_response - def parse_get_response(api_response: V1BulkGetRecordResponse): get_response = GetResponse() data = [] - errors = [] for record in api_response.records: field_data = {field: value for field, value in record.fields.items()} data.append(field_data) get_response.data = data - get_response.errors = errors - return get_response def parse_detokenize_response(api_response: HttpResponse[V1DetokenizeResponse]): @@ -320,7 +316,7 @@ def parse_detokenize_response(api_response: HttpResponse[V1DetokenizeResponse]): errors = errors detokenize_response = DetokenizeResponse() detokenize_response.detokenized_fields = detokenized_fields - detokenize_response.errors = errors + detokenize_response.errors = errors if len(errors) > 0 else None return detokenize_response @@ -357,7 +353,7 @@ def parse_invoke_connection_response(api_response: requests.Response): if 'x-request-id' in api_response.headers: metadata['request_id'] = api_response.headers['x-request-id'] - return InvokeConnectionResponse(data=data, metadata=metadata) + return InvokeConnectionResponse(data=data, metadata=metadata, errors=None) except Exception as e: raise SkyflowError(SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content), status_code) except HTTPError: diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 0cd8592c..343924ba 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b6' \ No newline at end of file +SDK_VERSION = '2.0.0b7' \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index 0ff9f038..bbca6e85 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -9,6 +9,7 @@ from skyflow.utils.logger import log_info, log_error_log from skyflow.vault.detect import DeidentifyTextRequest, ReidentifyTextRequest, TokenFormat, Transformations, \ GetDetectRunRequest, Bleep, DeidentifyFileRequest +from skyflow.vault.detect._file_input import FileInput valid_vault_config_keys = ["vault_id", "cluster_id", "credentials", "env"] valid_connection_config_keys = ["connection_id", "connection_url", 
"credentials"] @@ -257,9 +258,42 @@ def validate_update_connection_config(logger, config): return True +def validate_file_from_request(file_input: FileInput): + if file_input is None: + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_INPUT.value, invalid_input_error_code) + + has_file = hasattr(file_input, 'file') and file_input.file is not None + has_file_path = hasattr(file_input, 'file_path') and file_input.file_path is not None + + # Must provide exactly one of file or file_path + if (has_file and has_file_path) or (not has_file and not has_file_path): + raise SkyflowError(SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_INPUT.value, invalid_input_error_code) + + if has_file: + file = file_input.file + # Validate file object has required attributes + if not hasattr(file, 'name') or not isinstance(file.name, str) or not file.name.strip(): + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_TYPE.value, invalid_input_error_code) + + # Validate file name + file_name = os.path.splitext(file.name)[0] + if not file_name or not file_name.strip(): + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_NAME.value, invalid_input_error_code) + + elif has_file_path: + file_path = file_input.file_path + if not isinstance(file_path, str) or not file_path.strip(): + raise SkyflowError(SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_PATH.value, invalid_input_error_code) + + if not os.path.exists(file_path) or not os.path.isfile(file_path): + raise SkyflowError(SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_PATH.value, invalid_input_error_code) + def validate_deidentify_file_request(logger, request: DeidentifyFileRequest): if not hasattr(request, 'file') or request.file is None: raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_INPUT.value, invalid_input_error_code) + + # Validate file input first + validate_file_from_request(request.file) # Optional: entities if hasattr(request, 'entities') and request.entities is not None: diff --git a/skyflow/vault/connection/_invoke_connection_response.py b/skyflow/vault/connection/_invoke_connection_response.py index 818b94a1..882e150c 100644 --- a/skyflow/vault/connection/_invoke_connection_response.py +++ b/skyflow/vault/connection/_invoke_connection_response.py @@ -1,10 +1,11 @@ class InvokeConnectionResponse: - def __init__(self, data=None, metadata=None): + def __init__(self, data=None, metadata=None, errors=None): self.data = data self.metadata = metadata if metadata else {} + self.errors = errors if errors else None def __repr__(self): - return f"ConnectionResponse('data'={self.data},'metadata'={self.metadata})" + return f"ConnectionResponse('data'={self.data},'metadata'={self.metadata}), 'errors'={self.errors})" def __str__(self): return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py index 1dbd533c..606d58ef 100644 --- a/skyflow/vault/controller/_detect.py +++ b/skyflow/vault/controller/_detect.py @@ -1,3 +1,4 @@ +import io import json import os from skyflow.error import SkyflowError @@ -20,6 +21,7 @@ from skyflow.vault.detect import DeidentifyTextRequest, DeidentifyTextResponse, ReidentifyTextRequest, \ ReidentifyTextResponse, DeidentifyFileRequest, DeidentifyFileResponse, GetDetectRunRequest + class Detect: def __init__(self, vault_client): self.__vault_client = vault_client @@ -124,10 +126,22 @@ def output_to_dict_list(output): word_count = getattr(word_character_count, "word_count", None) char_count = getattr(word_character_count, "character_count", None) + 
base64_string = first_output.get("file", None) + extension = first_output.get("extension", None) + + file_obj = None + if base64_string is not None: + file_bytes = base64.b64decode(base64_string) + file_obj = io.BytesIO(file_bytes) + file_obj.name = f"deidentified.{extension}" if extension else "processed_file" + else: + file_obj = None + return DeidentifyFileResponse( - file=first_output.get("file", None), + file_base64=base64_string, + file=file_obj, # File class will be instantiated in DeidentifyFileResponse type=first_output.get("type", None), - extension=first_output.get("extension", None), + extension=extension, word_count=word_count, char_count=char_count, size_in_kb=size, @@ -137,7 +151,7 @@ def output_to_dict_list(output): entities=entities, run_id=run_id_val, status=status_val, - errors=[] + errors=None ) def __get_token_format(self, request): @@ -216,16 +230,26 @@ def reidentify_text(self, request: ReidentifyTextRequest) -> ReidentifyTextRespo log_error_log(SkyflowMessages.ErrorLogs.REIDENTIFY_TEXT_REQUEST_REJECTED.value, self.__vault_client.get_logger()) handle_exception(e, self.__vault_client.get_logger()) + def __get_file_from_request(self, request: DeidentifyFileRequest): + file_input = request.file + + # Check for file + if hasattr(file_input, 'file') and file_input.file is not None: + return file_input.file + + # Check for file_path if file is not provided + if hasattr(file_input, 'file_path') and file_input.file_path is not None: + return open(file_input.file_path, 'rb') + def deidentify_file(self, request: DeidentifyFileRequest): log_info(SkyflowMessages.Info.DETECT_FILE_TRIGGERED.value, self.__vault_client.get_logger()) validate_deidentify_file_request(self.__vault_client.get_logger(), request) self.__initialize() files_api = self.__vault_client.get_detect_file_api().with_raw_response - file_obj = request.file + file_obj = self.__get_file_from_request(request) file_name = getattr(file_obj, 'name', None) file_extension = self._get_file_extension(file_name) if file_name else None file_content = file_obj.read() - base64_string = base64.b64encode(file_content).decode('utf-8') try: @@ -375,7 +399,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): file_name_only = 'processed-'+os.path.basename(file_name) output_file_path = f"{request.output_directory}/{file_name_only}" with open(output_file_path, 'wb') as output_file: - output_file.write(base64.b64decode(parsed_response.file)) + output_file.write(base64.b64decode(parsed_response.file_base64)) log_info(SkyflowMessages.Info.DETECT_FILE_SUCCESS.value, self.__vault_client.get_logger()) return parsed_response diff --git a/skyflow/vault/data/_insert_response.py b/skyflow/vault/data/_insert_response.py index 6407426d..0c7c777f 100644 --- a/skyflow/vault/data/_insert_response.py +++ b/skyflow/vault/data/_insert_response.py @@ -1,7 +1,5 @@ class InsertResponse: def __init__(self, inserted_fields = None, errors=None): - if errors is None: - errors = list() self.inserted_fields = inserted_fields self.errors = errors diff --git a/skyflow/vault/data/_query_response.py b/skyflow/vault/data/_query_response.py index e2034758..b97fa9bd 100644 --- a/skyflow/vault/data/_query_response.py +++ b/skyflow/vault/data/_query_response.py @@ -1,7 +1,7 @@ class QueryResponse: def __init__(self): self.fields = [] - self.errors = [] + self.errors = None def __repr__(self): return f"QueryResponse(fields={self.fields}, errors={self.errors})" diff --git a/skyflow/vault/data/_update_response.py b/skyflow/vault/data/_update_response.py index 
dbbb9cc7..c37ee000 100644 --- a/skyflow/vault/data/_update_response.py +++ b/skyflow/vault/data/_update_response.py @@ -1,7 +1,7 @@ class UpdateResponse: def __init__(self, updated_field = None, errors=None): self.updated_field = updated_field - self.errors = errors if errors is not None else [] + self.errors = errors def __repr__(self): return f"UpdateResponse(updated_field={self.updated_field}, errors={self.errors})" diff --git a/skyflow/vault/detect/__init__.py b/skyflow/vault/detect/__init__.py index e385a1f2..bd09fed8 100644 --- a/skyflow/vault/detect/__init__.py +++ b/skyflow/vault/detect/__init__.py @@ -10,4 +10,5 @@ from ._deidentify_file_request import DeidentifyFileRequest from ._audio_bleep import Bleep from ._deidentify_file_response import DeidentifyFileResponse -from ._get_detect_run_request import GetDetectRunRequest \ No newline at end of file +from ._get_detect_run_request import GetDetectRunRequest +from ._file_input import FileInput \ No newline at end of file diff --git a/skyflow/vault/detect/_deidentify_file_request.py b/skyflow/vault/detect/_deidentify_file_request.py index a429f5d5..09d8b118 100644 --- a/skyflow/vault/detect/_deidentify_file_request.py +++ b/skyflow/vault/detect/_deidentify_file_request.py @@ -3,6 +3,7 @@ from skyflow.vault.detect import TokenFormat, Transformations from skyflow.vault.detect._audio_bleep import Bleep from skyflow.utils.enums import MaskingMethod, DetectOutputTranscriptions +from skyflow.vault.detect._file_input import FileInput class DeidentifyFileRequest: def __init__( @@ -24,7 +25,7 @@ def __init__( output_directory: Optional[str] = None, wait_time: Optional[Union[int, float]] = None ): - self.file: object = file + self.file: FileInput = file self.entities: Optional[List[DetectEntities]] = entities self.allow_regex_list: Optional[List[str]] = allow_regex_list self.restrict_regex_list: Optional[List[str]] = restrict_regex_list diff --git a/skyflow/vault/detect/_deidentify_file_response.py b/skyflow/vault/detect/_deidentify_file_response.py index f386080d..90a0d493 100644 --- a/skyflow/vault/detect/_deidentify_file_response.py +++ b/skyflow/vault/detect/_deidentify_file_response.py @@ -1,7 +1,11 @@ +import io +from skyflow.vault.detect._file import File + class DeidentifyFileResponse: def __init__( self, - file: str = None, + file_base64: str = None, + file: io.BytesIO = None, type: str = None, extension: str = None, word_count: int = None, @@ -13,9 +17,10 @@ def __init__( entities: list = None, # list of dicts with keys 'file' and 'extension' run_id: str = None, status: str = None, - errors: list = [], + errors: list = None, ): - self.file = file + self.file_base64 = file_base64 + self.file = File(file) if file else None self.type = type self.extension = extension self.word_count = word_count @@ -32,12 +37,12 @@ def __init__( def __repr__(self): return ( f"DeidentifyFileResponse(" - f"file={self.file!r}, type={self.type!r}, extension={self.extension!r}, " - f"word_count={self.word_count!r}, char_count={self.char_count!r}, " - f"size_in_kb={self.size_in_kb!r}, duration_in_seconds={self.duration_in_seconds!r}, " - f"page_count={self.page_count!r}, slide_count={self.slide_count!r}, " - f"entities={self.entities!r}, run_id={self.run_id!r}, status={self.status!r})," - f"errors={self.errors!r})" + f"file_base64={self.file_base64!r}, file={self.file!r}, type={self.type!r}, " + f"extension={self.extension!r}, word_count={self.word_count!r}, " + f"char_count={self.char_count!r}, size_in_kb={self.size_in_kb!r}, " + 
f"duration_in_seconds={self.duration_in_seconds!r}, page_count={self.page_count!r}, " + f"slide_count={self.slide_count!r}, entities={self.entities!r}, " + f"run_id={self.run_id!r}, status={self.status!r}, errors={self.errors!r})" ) def __str__(self): diff --git a/skyflow/vault/detect/_file.py b/skyflow/vault/detect/_file.py new file mode 100644 index 00000000..ad188666 --- /dev/null +++ b/skyflow/vault/detect/_file.py @@ -0,0 +1,53 @@ +import io +import mimetypes +import time + +class File: + def __init__(self, file: io.BytesIO = None): + self.file = file + + @property + def name(self) -> str: + """Get file name""" + if self.file: + return getattr(self.file, 'name', 'unknown') + return None + + @property + def size(self) -> int: + """Get file size in bytes""" + if self.file: + pos = self.file.tell() + self.file.seek(0, io.SEEK_END) + size = self.file.tell() + self.file.seek(pos) + return size + return None + + @property + def type(self) -> str: + """Get file mime type""" + if self.file: + return mimetypes.guess_type(self.name)[0] or '' + return None + + @property + def last_modified(self) -> int: + """Get file last modified timestamp in milliseconds""" + if self.file: + return int(time.time() * 1000) + return None + + def seek(self, offset, whence=0): + if self.file: + return self.file.seek(offset, whence) + + def read(self, size=-1): + if self.file: + return self.file.read(size) + + def __repr__(self): + return ( + f"File(name={self.name!r}, size={self.size!r}, type={self.type!r}, " + f"last_modified={self.last_modified!r})" + ) diff --git a/skyflow/vault/detect/_file_input.py b/skyflow/vault/detect/_file_input.py new file mode 100644 index 00000000..472ca0e2 --- /dev/null +++ b/skyflow/vault/detect/_file_input.py @@ -0,0 +1,19 @@ +class FileInput: + """ + Represents a file input for the vault detection process. + + Attributes: + file (str): The file object to be processed. This can be a file-like object or a binary string. + file_path (str): The path to the file to be processed. 
+ """ + + def __init__(self, file: str= None, file_path: str = None): + self.file = file + self.file_path = file_path + + def __repr__(self) -> str: + return f"FileInput(file={self.file!r}, file_path={self.file_path!r})" + + def __str__(self) -> str: + return self.__repr__() + \ No newline at end of file diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index 6324d9a7..6eaacf47 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -252,6 +252,12 @@ def test_parse_insert_response(self): result = parse_insert_response(api_response, continue_on_error=True) self.assertEqual(len(result.inserted_fields), 1) self.assertEqual(len(result.errors), 1) + # Assert first successful record + self.assertEqual(result.inserted_fields[0]["skyflow_id"], "id1") + # Assert error record + self.assertEqual(result.errors[0]["error"], TEST_ERROR_MESSAGE) + self.assertEqual(result.errors[0]["http_code"], 400) + self.assertEqual(result.errors[0]["request_id"], "12345") def test_parse_insert_response_continue_on_error_false(self): mock_api_response = Mock() @@ -270,7 +276,7 @@ def test_parse_insert_response_continue_on_error_false(self): ] self.assertEqual(result.inserted_fields, expected_inserted_fields) - self.assertEqual(result.errors, []) + self.assertEqual(result.errors, None) def test_parse_update_record_response(self): api_response = Mock() @@ -291,7 +297,7 @@ def test_parse_delete_response_successful(self): expected_deleted_ids = ["id_1", "id_2", "id_3"] self.assertEqual(result.deleted_ids, expected_deleted_ids) - self.assertEqual(result.errors, []) + self.assertEqual(result.errors, None) def test_parse_get_response_successful(self): mock_api_response = Mock() @@ -310,7 +316,7 @@ def test_parse_get_response_successful(self): ] self.assertEqual(result.data, expected_data) - self.assertEqual(result.errors, []) + # self.assertEqual(result.errors, None) def test_parse_detokenize_response_with_mixed_records(self): mock_api_response = Mock() @@ -384,6 +390,7 @@ def test_parse_invoke_connection_response_successful(self, mock_response): self.assertIsInstance(result, InvokeConnectionResponse) self.assertEqual(result.data["key"], "value") self.assertEqual(result.metadata["request_id"], "1234") + self.assertEqual(result.errors, None) @patch("requests.Response") def test_parse_invoke_connection_response_json_decode_error(self, mock_response): diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py index 70702514..4ccad1c7 100644 --- a/tests/vault/controller/test__connection.py +++ b/tests/vault/controller/test__connection.py @@ -55,7 +55,8 @@ def test_invoke_success(self, mock_send): # Assertions for successful invocation expected_response = { 'data': {"response": "success"}, - 'metadata': {"request_id": "test-request-id"} + 'metadata': {"request_id": "test-request-id"}, + 'errors': None } self.assertEqual(vars(response), expected_response) self.mock_vault_client.get_bearer_token.assert_called_once() diff --git a/tests/vault/controller/test__detect.py b/tests/vault/controller/test__detect.py index 29db32dc..1352f85b 100644 --- a/tests/vault/controller/test__detect.py +++ b/tests/vault/controller/test__detect.py @@ -6,8 +6,12 @@ from skyflow.utils import SkyflowMessages from skyflow.vault.controller import Detect from skyflow.vault.detect import DeidentifyTextRequest, ReidentifyTextRequest, \ - TokenFormat, DateTransformation, Transformations, DeidentifyFileRequest, GetDetectRunRequest, DeidentifyFileResponse + TokenFormat, 
DateTransformation, Transformations, DeidentifyFileRequest, GetDetectRunRequest, \ + DeidentifyFileResponse, FileInput from skyflow.utils.enums import DetectEntities, TokenType +import io + +from skyflow.vault.detect._file import File VAULT_ID = "test_vault_id" @@ -127,7 +131,7 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 file_obj.name = "/tmp/test.txt" mock_basename.return_value = "test.txt" mock_base64.b64encode.return_value = b"dGVzdCBjb250ZW50" - req = DeidentifyFileRequest(file=file_obj) + req = DeidentifyFileRequest(file=FileInput(file=file_obj)) req.entities = [] req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) req.allow_regex_list = [] @@ -149,18 +153,38 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", - return_value=DeidentifyFileResponse(file="dGVzdCBjb250ZW50", type="txt", extension="txt", + return_value=DeidentifyFileResponse(file_base64="dGVzdCBjb250ZW50", + file=io.BytesIO(b"test content"), type="txt", + extension="txt", word_count=1, char_count=1, size_in_kb=1, duration_in_seconds=None, page_count=None, slide_count=None, entities=[], run_id="runid123", - status="SUCCESS", errors=[])) as mock_parse: + status="SUCCESS", errors=None)) as mock_parse: result = self.detect.deidentify_file(req) + mock_validate.assert_called_once() files_api.deidentify_text.assert_called_once() mock_poll.assert_called_once() mock_parse.assert_called_once() + self.assertIsInstance(result, DeidentifyFileResponse) self.assertEqual(result.status, "SUCCESS") + self.assertEqual(result.run_id, "runid123") + self.assertEqual(result.file_base64, "dGVzdCBjb250ZW50") + self.assertEqual(result.type, "txt") + self.assertEqual(result.extension, "txt") + + self.assertIsInstance(result.file, File) + result.file.seek(0) + self.assertEqual(result.file.read(), b"test content") + self.assertEqual(result.word_count, 1) + self.assertEqual(result.char_count, 1) + self.assertEqual(result.size_in_kb, 1) + self.assertIsNone(result.duration_in_seconds) + self.assertIsNone(result.page_count) + self.assertIsNone(result.slide_count) + self.assertEqual(result.entities, []) + self.assertEqual(result.errors, None) @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") @patch("skyflow.vault.controller._detect.base64") @@ -170,7 +194,7 @@ def test_deidentify_file_audio_success(self, mock_base64, mock_validate): file_obj.read.return_value = file_content file_obj.name = "audio.mp3" mock_base64.b64encode.return_value = b"YXVkaW8gYnl0ZXM=" - req = DeidentifyFileRequest(file=file_obj) + req = DeidentifyFileRequest(file=FileInput(file=file_obj)) req.entities = [] req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) req.allow_regex_list = [] @@ -192,11 +216,13 @@ def test_deidentify_file_audio_success(self, mock_base64, mock_validate): with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", - return_value=DeidentifyFileResponse(file="YXVkaW8gYnl0ZXM=", type="mp3", extension="mp3", + return_value=DeidentifyFileResponse(file_base64="YXVkaW8gYnl0ZXM=", + file=io.BytesIO(b"audio bytes"), type="mp3", + extension="mp3", word_count=1, char_count=1, size_in_kb=1, 
duration_in_seconds=1, page_count=None, slide_count=None, entities=[], run_id="runid456", - status="SUCCESS", errors=[])) as mock_parse: + status="SUCCESS", errors=None)) as mock_parse: result = self.detect.deidentify_file(req) mock_validate.assert_called_once() files_api.deidentify_audio.assert_called_once() @@ -238,7 +264,7 @@ def test_get_detect_run_success(self, mock_validate): char_count=1, size_in_kb=1, duration_in_seconds=None, page_count=None, slide_count=None, entities=[], run_id="runid789", status="SUCCESS", - errors=[])) as mock_parse: + errors=None)) as mock_parse: result = self.detect.get_detect_run(req) mock_validate.assert_called_once() files_api.get_run.assert_called_once() @@ -262,11 +288,10 @@ def test_get_detect_run_exception(self, mock_validate): @patch("skyflow.vault.controller._detect.open", create=True) @patch.object(Detect, "_Detect__poll_for_processed_file") def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, mock_base64, mock_validate): - """Test all file type branches with optimized mocking""" - # Common mocks file_content = b"test content" mock_base64.b64encode.return_value = b"dGVzdCBjb250ZW50" + mock_base64.b64decode.return_value = file_content # Prepare a generic processed_response for all branches processed_response = Mock() @@ -283,69 +308,78 @@ def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, processed_response.run_id = "runid123" mock_poll.return_value = processed_response - # Patch __parse_deidentify_file_response to return a valid DeidentifyFileResponse - with patch.object(self.detect, "_Detect__parse_deidentify_file_response", - return_value=DeidentifyFileResponse( - file="dGVzdCBjb250ZW50", type="pdf", extension="pdf", - word_count=1, char_count=1, size_in_kb=1, - duration_in_seconds=1, page_count=1, slide_count=1, - entities=[], run_id="runid123", status="SUCCESS", errors=[] - )) as mock_parse: - # Test configuration for different file types - test_cases = [ - ("test.pdf", "pdf", "deidentify_pdf"), - ("test.jpg", "jpg", "deidentify_image"), - ("test.pptx", "pptx", "deidentify_presentation"), - ("test.csv", "csv", "deidentify_spreadsheet"), - ("test.docx", "docx", "deidentify_document"), - ("test.json", "json", "deidentify_structured_text"), - ("test.xml", "xml", "deidentify_structured_text"), - ("test.unknown", "unknown", "deidentify_file") - ] + # Test configuration for different file types + test_cases = [ + ("test.pdf", "pdf", "deidentify_pdf"), + ("test.jpg", "jpg", "deidentify_image"), + ("test.pptx", "pptx", "deidentify_presentation"), + ("test.csv", "csv", "deidentify_spreadsheet"), + ("test.docx", "docx", "deidentify_document"), + ("test.json", "json", "deidentify_structured_text"), + ("test.xml", "xml", "deidentify_structured_text"), + ("test.unknown", "unknown", "deidentify_file") + ] + + for file_name, extension, api_method in test_cases: + with self.subTest(file_type=extension): + # Setup file mock + file_obj = Mock() + file_obj.read.return_value = file_content + file_obj.name = file_name + mock_basename.return_value = file_name + + # Setup request with FileInput + req = DeidentifyFileRequest(file=FileInput(file=file_obj)) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = "/tmp" + + # Setup API mock + files_api = Mock() + files_api.with_raw_response = files_api + api_method_mock = Mock() + setattr(files_api, 
api_method, api_method_mock) + self.vault_client.get_detect_file_api.return_value = files_api + + # Setup API response + api_response = Mock() + api_response.data = Mock(run_id="runid123") + api_method_mock.return_value = api_response + + # Actually run the method + result = self.detect.deidentify_file(req) + + # Verify the result + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + self.assertEqual(result.run_id, "runid123") + self.assertEqual(result.file_base64, "dGVzdCBjb250ZW50") + self.assertIsInstance(result.file, File) + result.file.seek(0) # Reset file pointer before reading + self.assertEqual(result.file.read(), b"test content") + self.assertEqual(result.type, "pdf") + self.assertEqual(result.extension, "pdf") + self.assertEqual(result.size_in_kb, 1) + self.assertEqual(result.duration_in_seconds, 1) + self.assertEqual(result.page_count, 1) + self.assertEqual(result.slide_count, 1) + self.assertEqual(result.word_count, 1) + self.assertEqual(result.char_count, 1) + + # Verify API was called + api_method_mock.assert_called_once() + mock_poll.assert_called_with("runid123", None) - for file_name, extension, api_method in test_cases: - with self.subTest(file_type=extension): - # Setup file mock - file_obj = Mock() - file_obj.read.return_value = file_content - file_obj.name = file_name - mock_basename.return_value = file_name - - # Setup request - req = DeidentifyFileRequest(file=file_obj) - req.entities = [] - req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) - req.allow_regex_list = [] - req.restrict_regex_list = [] - req.transformations = None - req.output_directory = "/tmp" - - # Setup API mock - files_api = Mock() - files_api.with_raw_response = files_api - api_method_mock = Mock() - setattr(files_api, api_method, api_method_mock) - self.vault_client.get_detect_file_api.return_value = files_api - - # Setup API response - api_response = Mock() - api_response.data = Mock(run_id="runid123") - api_method_mock.return_value = api_response - - # Actually run the method - result = self.detect.deidentify_file(req) - self.assertIsInstance(result, DeidentifyFileResponse) - self.assertEqual(result.status, "SUCCESS") - self.assertEqual(result.file, "dGVzdCBjb250ZW50") - self.assertEqual(result.type, "pdf") - self.assertEqual(result.extension, "pdf") @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") @patch("skyflow.vault.controller._detect.base64") def test_deidentify_file_exception(self, mock_base64, mock_validate): file_obj = Mock() file_obj.read.side_effect = Exception("Read error") file_obj.name = "test.txt" - req = DeidentifyFileRequest(file=file_obj) + req = DeidentifyFileRequest(file=FileInput(file=file_obj)) req.entities = [] req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) req.allow_regex_list = [] @@ -404,8 +438,8 @@ def test_parse_deidentify_file_response_dict_and_obj(self): # Dict input data = { "output": [ - {"processedFile": "abc", "processedFileType": "pdf", "processedFileExtension": "pdf"}, - {"processedFile": "def", "processedFileType": "entities", "processedFileExtension": "json"} + {"processedFile": "YWJj", "processedFileType": "pdf", "processedFileExtension": "pdf"}, # base64 for "abc" + {"processedFile": "ZGVm", "processedFileType": "entities", "processedFileExtension": "json"} # base64 for "def" ], "word_character_count": {"word_count": 5, "character_count": 10}, "size": 1, @@ -426,9 +460,9 @@ class DummyWordChar: class DummyData: 
output = [ type("O", (), - {"processed_file": "abc", "processed_file_type": "pdf", "processed_file_extension": "pdf"})(), + {"processed_file": "YWJj", "processed_file_type": "pdf", "processed_file_extension": "pdf"})(), type("O", (), - {"processed_file": "def", "processed_file_type": "entities", "processed_file_extension": "json"})() + {"processed_file": "ZGVm", "processed_file_type": "entities", "processed_file_extension": "json"})() ] word_character_count = DummyWordChar() size = 1 @@ -441,7 +475,9 @@ class DummyData: obj_data = DummyData() result = self.detect._Detect__parse_deidentify_file_response(obj_data, "runid", "SUCCESS") self.assertIsInstance(result, DeidentifyFileResponse) - + self.assertEqual(result.file_base64, "YWJj") + self.assertIsInstance(result.file, File) + self.assertEqual(result.file.read(), b"abc") def test_get_token_format_missing_attribute(self): """Test __get_token_format when token_format attribute is missing""" class DummyRequest: @@ -559,12 +595,11 @@ def track_sleep(*args): self.assertEqual(calls, [2, 2]) self.assertEqual(result.status, "SUCCESS") - def test_parse_deidentify_file_response_output_conversion(self): """Test output conversion in parse_deidentify_file_response""" class OutputObj: - processed_file = "file123" + processed_file = "YWJjMTIz" # base64 for "abc123" processed_file_type = "pdf" processed_file_extension = "pdf" @@ -574,6 +609,103 @@ class OutputObj: result = self.detect._Detect__parse_deidentify_file_response(data) - self.assertEqual(result.file, "file123") + # Check base64 string + self.assertEqual(result.file_base64, "YWJjMTIz") + # Check File object + self.assertIsInstance(result.file, File) + self.assertEqual(result.file.read(), b"abc123") + # Check other attributes self.assertEqual(result.type, "pdf") - self.assertEqual(result.extension, "pdf") \ No newline at end of file + self.assertEqual(result.extension, "pdf") + # Reset file pointer and verify content again + result.file.seek(0) + self.assertEqual(result.file.read(), b"abc123") + + @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") + @patch("skyflow.vault.controller._detect.base64") + @patch("skyflow.vault.controller._detect.os.path.basename") + @patch("skyflow.vault.controller._detect.open", create=True) + def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_base64, mock_validate): + # Setup mock file context + mock_file = MagicMock() + mock_file.read.return_value = b"test content from file path" + mock_file.name = "/path/to/test.txt" + mock_file.__enter__.return_value = mock_file # Mock context manager + mock_open.return_value = mock_file + mock_basename.return_value = "test.txt" + mock_base64.b64encode.return_value = b"dGVzdCBjb250ZW50IGZyb20gZmlsZSBwYXRo" # base64 of "test content from file path" + mock_base64.b64decode.return_value = b"test content from file path" + # Create request with file_path + req = DeidentifyFileRequest(file=FileInput(file_path="/path/to/test.txt")) + req.entities = [] + req.token_format = Mock(default="default", entity_unique_counter=[], entity_only=[]) + req.allow_regex_list = [] + req.restrict_regex_list = [] + req.transformations = None + req.output_directory = "/tmp" + + # Setup API mock + files_api = Mock() + files_api.with_raw_response = files_api + files_api.deidentify_text = Mock() + self.vault_client.get_detect_file_api.return_value = files_api + api_response = Mock() + api_response.data = Mock(run_id="runid123") + files_api.deidentify_text.return_value = api_response + + # Setup processed 
response + processed_response = Mock() + processed_response.status = "SUCCESS" + processed_response.output = [] + processed_response.word_character_count = Mock(word_count=1, character_count=1) + + # Test the method + with patch.object(self.detect, "_Detect__poll_for_processed_file", + return_value=processed_response) as mock_poll, \ + patch.object(self.detect, "_Detect__parse_deidentify_file_response", + return_value=DeidentifyFileResponse( + file_base64="dGVzdCBjb250ZW50IGZyb20gZmlsZSBwYXRo", + file=io.BytesIO(b"test content from file path"), + type="txt", + extension="txt", + word_count=1, + char_count=1, + size_in_kb=1, + duration_in_seconds=None, + page_count=None, + slide_count=None, + entities=[], + run_id="runid123", + status="SUCCESS", + errors=None + )) as mock_parse: + + result = self.detect.deidentify_file(req) + + mock_file.read.assert_called_once() + mock_basename.assert_called_with("/path/to/test.txt") + + mock_validate.assert_called_once() + files_api.deidentify_text.assert_called_once() + mock_poll.assert_called_once() + mock_parse.assert_called_once() + + # Response assertions + self.assertIsInstance(result, DeidentifyFileResponse) + self.assertEqual(result.status, "SUCCESS") + self.assertEqual(result.run_id, "runid123") + self.assertEqual(result.file_base64, "dGVzdCBjb250ZW50IGZyb20gZmlsZSBwYXRo") + self.assertEqual(result.type, "txt") + self.assertEqual(result.extension, "txt") + + self.assertIsInstance(result.file, File) + result.file.seek(0) + self.assertEqual(result.file.read(), b"test content from file path") + self.assertEqual(result.word_count, 1) + self.assertEqual(result.char_count, 1) + self.assertEqual(result.size_in_kb, 1) + self.assertIsNone(result.duration_in_seconds) + self.assertIsNone(result.page_count) + self.assertIsNone(result.slide_count) + self.assertEqual(result.entities, []) + self.assertEqual(result.errors, None) diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 39b44ae1..0c8a7743 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -123,7 +123,7 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Assert that the result matches the expected InsertResponse self.assertEqual(result.inserted_fields, expected_inserted_fields) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_insert_request") def test_insert_handles_generic_error(self, mock_validate): @@ -181,7 +181,7 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Assert that the result matches the expected InsertResponse self.assertEqual(result.inserted_fields, expected_inserted_fields) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_update_request") @patch("skyflow.vault.controller._vault.parse_update_record_response") @@ -223,7 +223,7 @@ def test_update_successful(self, mock_parse_response, mock_validate): # Check that the result matches the expected UpdateResponse self.assertEqual(result.updated_field, expected_updated_field) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_update_request") def test_update_handles_generic_error(self, mock_validate): @@ -257,7 +257,7 @@ def 
test_delete_successful(self, mock_parse_response, mock_validate): # Expected parsed response expected_deleted_ids = ["12345", "67890"] - expected_response = DeleteResponse(deleted_ids=expected_deleted_ids, errors=[]) + expected_response = DeleteResponse(deleted_ids=expected_deleted_ids, errors=None) # Set the return value for the parse response mock_parse_response.return_value = expected_response @@ -273,7 +273,7 @@ def test_delete_successful(self, mock_parse_response, mock_validate): # Check that the result matches the expected DeleteResponse self.assertEqual(result.deleted_ids, expected_deleted_ids) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_delete_request") def test_delete_handles_generic_exception(self, mock_validate): @@ -330,7 +330,7 @@ def test_get_successful(self, mock_parse_response, mock_validate): {"field1": "value1", "field2": "value2"}, {"field1": "value3", "field2": "value4"} ] - expected_response = GetResponse(data=expected_data, errors=[]) + expected_response = GetResponse(data=expected_data, errors=None) # Set the return value for parse_get_response mock_parse_response.return_value = expected_response @@ -346,7 +346,7 @@ def test_get_successful(self, mock_parse_response, mock_validate): # Check that the result matches the expected GetResponse self.assertEqual(result.data, expected_data) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_get_request") @patch("skyflow.vault.controller._vault.parse_get_response") @@ -381,7 +381,7 @@ def test_get_successful_with_column_values(self, mock_parse_response, mock_valid {"field1": "value1", "field2": "value2"}, {"field1": "value3", "field2": "value4"} ] - expected_response = GetResponse(data=expected_data, errors=[]) + expected_response = GetResponse(data=expected_data, errors=None) # Set the return value for parse_get_response mock_parse_response.return_value = expected_response @@ -397,7 +397,7 @@ def test_get_successful_with_column_values(self, mock_parse_response, mock_valid # Check that the result matches the expected GetResponse self.assertEqual(result.data, expected_data) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_get_request") def test_get_handles_generic_error(self, mock_validate): @@ -446,7 +446,7 @@ def test_query_successful(self, mock_parse_response, mock_validate): # Check that the result matches the expected QueryResponse self.assertEqual(result.fields, expected_fields) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_query_request") def test_query_handles_generic_error(self, mock_validate): @@ -495,7 +495,7 @@ def test_detokenize_successful(self, mock_parse_response, mock_validate): {"token": "token1", "value": "value1", "type": "STRING"}, {"token": "token2", "value": "value2", "type": "STRING"} ] - expected_response = DetokenizeResponse(detokenized_fields=expected_fields, errors=[]) + expected_response = DetokenizeResponse(detokenized_fields=expected_fields, errors=None) # Set the return value for parse_detokenize_response mock_parse_response.return_value = expected_response @@ -511,7 +511,7 @@ def test_detokenize_successful(self, 
mock_parse_response, mock_validate): # Check that the result matches the expected DetokenizeResponse self.assertEqual(result.detokenized_fields, expected_fields) - self.assertEqual(result.errors, []) # No errors expected + self.assertEqual(result.errors, None) # No errors expected @patch("skyflow.vault.controller._vault.validate_detokenize_request") def test_detokenize_handles_generic_error(self, mock_validate): From dc36c8694b02090741434cd18ebac3e038f24b43 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Thu, 24 Jul 2025 19:57:19 +0530 Subject: [PATCH 38/60] SK-2199: Update Python SDK v2 with latest Detect API changes (v4.0) (#190) * SK-2199: Update Python SDK v2 with latest Detect API changes (v4.0) (#189) --- setup.py | 2 +- skyflow/generated/rest/__init__.py | 72 +- skyflow/generated/rest/client.py | 16 +- skyflow/generated/rest/core/client_wrapper.py | 16 +- .../generated/rest/core/pydantic_utilities.py | 6 +- skyflow/generated/rest/deprecated/client.py | 598 ----------------- .../generated/rest/deprecated/raw_client.py | 624 ------------------ skyflow/generated/rest/files/__init__.py | 6 + skyflow/generated/rest/files/client.py | 184 ++++++ skyflow/generated/rest/files/raw_client.py | 266 ++++++++ .../generated/rest/files/types/__init__.py | 6 + ...eidentify_file_request_file_data_format.py | 1 + ...deidentify_image_request_masking_method.py | 2 +- .../types/reidentify_file_request_file.py | 34 + ...eidentify_file_request_file_data_format.py | 7 + .../types/reidentify_file_request_format.py | 37 ++ .../{deprecated => guardrails}/__init__.py | 0 skyflow/generated/rest/guardrails/client.py | 164 +++++ .../generated/rest/guardrails/raw_client.py | 221 +++++++ skyflow/generated/rest/strings/client.py | 9 + skyflow/generated/rest/strings/raw_client.py | 9 + skyflow/generated/rest/types/__init__.py | 62 +- .../types/advanced_options_column_mapping.py | 37 -- .../advanced_options_entity_column_map.py | 28 - .../types/advanced_options_vault_schema.py | 29 - .../types/audio_config_transcription_type.py | 19 - .../rest/types/check_guardrails_response.py | 42 ++ .../check_guardrails_response_validation.py | 5 + .../generated/rest/types/configuration_id.py | 3 + .../rest/types/deidentify_file_output.py | 14 +- .../rest/types/deidentify_status_response.py | 20 +- ...fy_status_response_word_character_count.py | 26 - .../rest/types/detect_data_accuracy.py | 17 - .../rest/types/detect_data_entities.py | 72 -- .../types/detect_file_request_data_type.py | 5 - .../types/detect_request_deidentify_option.py | 5 - skyflow/generated/rest/types/entity_type.py | 9 +- ...ocessed_file_output_processed_file_type.py | 19 - ...options.py => reidentify_file_response.py} | 16 +- ....py => reidentify_file_response_output.py} | 13 +- .../types/reidentify_file_response_status.py | 5 + .../rest/types/v_1_advanced_options.py | 38 -- .../generated/rest/types/v_1_audio_config.py | 31 - .../generated/rest/types/v_1_audio_options.py | 46 -- .../rest/types/v_1_detect_file_response.py | 26 - .../rest/types/v_1_detect_status_response.py | 34 - .../v_1_detect_status_response_status.py | 5 - .../rest/types/v_1_detect_text_request.py | 68 -- .../rest/types/v_1_detect_text_response.py | 32 - .../rest/types/v_1_file_data_format.py | 28 - skyflow/generated/rest/types/v_1_locations.py | 41 -- .../generated/rest/types/v_1_pdf_config.py | 24 - .../rest/types/v_1_processed_file_output.py | 31 - .../rest/types/v_1_response_entities.py | 43 -- skyflow/utils/_version.py | 2 +- 
skyflow/utils/enums/detect_entities.py | 9 +- skyflow/vault/controller/_detect.py | 38 +- tests/vault/controller/test__detect.py | 24 +- 58 files changed, 1140 insertions(+), 2106 deletions(-) delete mode 100644 skyflow/generated/rest/deprecated/client.py delete mode 100644 skyflow/generated/rest/deprecated/raw_client.py create mode 100644 skyflow/generated/rest/files/types/reidentify_file_request_file.py create mode 100644 skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/reidentify_file_request_format.py rename skyflow/generated/rest/{deprecated => guardrails}/__init__.py (100%) create mode 100644 skyflow/generated/rest/guardrails/client.py create mode 100644 skyflow/generated/rest/guardrails/raw_client.py delete mode 100644 skyflow/generated/rest/types/advanced_options_column_mapping.py delete mode 100644 skyflow/generated/rest/types/advanced_options_entity_column_map.py delete mode 100644 skyflow/generated/rest/types/advanced_options_vault_schema.py delete mode 100644 skyflow/generated/rest/types/audio_config_transcription_type.py create mode 100644 skyflow/generated/rest/types/check_guardrails_response.py create mode 100644 skyflow/generated/rest/types/check_guardrails_response_validation.py create mode 100644 skyflow/generated/rest/types/configuration_id.py delete mode 100644 skyflow/generated/rest/types/deidentify_status_response_word_character_count.py delete mode 100644 skyflow/generated/rest/types/detect_data_accuracy.py delete mode 100644 skyflow/generated/rest/types/detect_data_entities.py delete mode 100644 skyflow/generated/rest/types/detect_file_request_data_type.py delete mode 100644 skyflow/generated/rest/types/detect_request_deidentify_option.py delete mode 100644 skyflow/generated/rest/types/processed_file_output_processed_file_type.py rename skyflow/generated/rest/types/{v_1_pdf_options.py => reidentify_file_response.py} (51%) rename skyflow/generated/rest/types/{v_1_image_options.py => reidentify_file_response_output.py} (57%) create mode 100644 skyflow/generated/rest/types/reidentify_file_response_status.py delete mode 100644 skyflow/generated/rest/types/v_1_advanced_options.py delete mode 100644 skyflow/generated/rest/types/v_1_audio_config.py delete mode 100644 skyflow/generated/rest/types/v_1_audio_options.py delete mode 100644 skyflow/generated/rest/types/v_1_detect_file_response.py delete mode 100644 skyflow/generated/rest/types/v_1_detect_status_response.py delete mode 100644 skyflow/generated/rest/types/v_1_detect_status_response_status.py delete mode 100644 skyflow/generated/rest/types/v_1_detect_text_request.py delete mode 100644 skyflow/generated/rest/types/v_1_detect_text_response.py delete mode 100644 skyflow/generated/rest/types/v_1_file_data_format.py delete mode 100644 skyflow/generated/rest/types/v_1_locations.py delete mode 100644 skyflow/generated/rest/types/v_1_pdf_config.py delete mode 100644 skyflow/generated/rest/types/v_1_processed_file_output.py delete mode 100644 skyflow/generated/rest/types/v_1_response_entities.py diff --git a/setup.py b/setup.py index a92f3c8d..c9707d32 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b7' +current_version = '2.0.0b7.dev0+b8744bb' setup( name='skyflow', diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index 9ff683cb..bad57c24 100644 --- a/skyflow/generated/rest/__init__.py +++ 
b/skyflow/generated/rest/__init__.py @@ -3,16 +3,15 @@ # isort: skip_file from .types import ( - AdvancedOptionsColumnMapping, - AdvancedOptionsEntityColumnMap, - AdvancedOptionsVaultSchema, AllowRegex, - AudioConfigTranscriptionType, AuditEventAuditResourceType, AuditEventContext, AuditEventData, AuditEventHttpInfo, BatchRecordMethod, + CheckGuardrailsResponse, + CheckGuardrailsResponseValidation, + ConfigurationId, ContextAccessType, ContextAuthMode, DeidentifyFileOutput, @@ -21,12 +20,7 @@ DeidentifyStatusResponse, DeidentifyStatusResponseOutputType, DeidentifyStatusResponseStatus, - DeidentifyStatusResponseWordCharacterCount, DeidentifyStringResponse, - DetectDataAccuracy, - DetectDataEntities, - DetectFileRequestDataType, - DetectRequestDeidentifyOption, DetectedEntity, DetokenizeRecordResponseValueType, EntityLocation, @@ -36,9 +30,11 @@ ErrorResponseError, ErrorString, GooglerpcStatus, - ProcessedFileOutputProcessedFileType, ProtobufAny, RedactionEnumRedaction, + ReidentifyFileResponse, + ReidentifyFileResponseOutput, + ReidentifyFileResponseStatus, ReidentifyStringResponse, RequestActionType, ResourceId, @@ -51,9 +47,6 @@ TransformationsShiftDates, TransformationsShiftDatesEntityTypesItem, Uuid, - V1AdvancedOptions, - V1AudioConfig, - V1AudioOptions, V1AuditAfterOptions, V1AuditEventResponse, V1AuditResponse, @@ -68,29 +61,17 @@ V1Card, V1DeleteFileResponse, V1DeleteRecordResponse, - V1DetectFileResponse, - V1DetectStatusResponse, - V1DetectStatusResponseStatus, - V1DetectTextRequest, - V1DetectTextResponse, V1DetokenizeRecordRequest, V1DetokenizeRecordResponse, V1DetokenizeResponse, V1FieldRecords, V1FileAvScanStatus, - V1FileDataFormat, V1GetAuthTokenResponse, V1GetFileScanStatusResponse, V1GetQueryResponse, - V1ImageOptions, V1InsertRecordResponse, - V1Locations, V1MemberType, - V1PdfConfig, - V1PdfOptions, - V1ProcessedFileOutput, V1RecordMetaProperties, - V1ResponseEntities, V1TokenizeRecordRequest, V1TokenizeRecordResponse, V1TokenizeResponse, @@ -100,7 +81,7 @@ VaultId, ) from .errors import BadRequestError, InternalServerError, NotFoundError, UnauthorizedError -from . import audit, authentication, bin_lookup, deprecated, files, query, records, strings, tokens +from . 
import audit, authentication, bin_lookup, files, guardrails, query, records, strings, tokens from .audit import ( AuditServiceListAuditEventsRequestFilterOpsActionType, AuditServiceListAuditEventsRequestFilterOpsContextAccessType, @@ -130,6 +111,9 @@ DeidentifyStructuredTextRequestFile, DeidentifyStructuredTextRequestFileDataFormat, DeidentifyTextRequestFile, + ReidentifyFileRequestFile, + ReidentifyFileRequestFileDataFormat, + ReidentifyFileRequestFormat, ) from .records import ( RecordServiceBulkGetRecordRequestOrderBy, @@ -140,12 +124,8 @@ from .version import __version__ __all__ = [ - "AdvancedOptionsColumnMapping", - "AdvancedOptionsEntityColumnMap", - "AdvancedOptionsVaultSchema", "AllowRegex", "AsyncSkyflow", - "AudioConfigTranscriptionType", "AuditEventAuditResourceType", "AuditEventContext", "AuditEventData", @@ -158,6 +138,9 @@ "AuditServiceListAuditEventsRequestSortOpsOrderBy", "BadRequestError", "BatchRecordMethod", + "CheckGuardrailsResponse", + "CheckGuardrailsResponseValidation", + "ConfigurationId", "ContextAccessType", "ContextAuthMode", "DeidentifyAudioRequestFile", @@ -181,15 +164,10 @@ "DeidentifyStatusResponse", "DeidentifyStatusResponseOutputType", "DeidentifyStatusResponseStatus", - "DeidentifyStatusResponseWordCharacterCount", "DeidentifyStringResponse", "DeidentifyStructuredTextRequestFile", "DeidentifyStructuredTextRequestFileDataFormat", "DeidentifyTextRequestFile", - "DetectDataAccuracy", - "DetectDataEntities", - "DetectFileRequestDataType", - "DetectRequestDeidentifyOption", "DetectedEntity", "DetokenizeRecordResponseValueType", "EntityLocation", @@ -201,12 +179,17 @@ "GooglerpcStatus", "InternalServerError", "NotFoundError", - "ProcessedFileOutputProcessedFileType", "ProtobufAny", "RecordServiceBulkGetRecordRequestOrderBy", "RecordServiceBulkGetRecordRequestRedaction", "RecordServiceGetRecordRequestRedaction", "RedactionEnumRedaction", + "ReidentifyFileRequestFile", + "ReidentifyFileRequestFileDataFormat", + "ReidentifyFileRequestFormat", + "ReidentifyFileResponse", + "ReidentifyFileResponseOutput", + "ReidentifyFileResponseStatus", "ReidentifyStringRequestFormat", "ReidentifyStringResponse", "RequestActionType", @@ -223,9 +206,6 @@ "TransformationsShiftDatesEntityTypesItem", "UnauthorizedError", "Uuid", - "V1AdvancedOptions", - "V1AudioConfig", - "V1AudioOptions", "V1AuditAfterOptions", "V1AuditEventResponse", "V1AuditResponse", @@ -240,29 +220,17 @@ "V1Card", "V1DeleteFileResponse", "V1DeleteRecordResponse", - "V1DetectFileResponse", - "V1DetectStatusResponse", - "V1DetectStatusResponseStatus", - "V1DetectTextRequest", - "V1DetectTextResponse", "V1DetokenizeRecordRequest", "V1DetokenizeRecordResponse", "V1DetokenizeResponse", "V1FieldRecords", "V1FileAvScanStatus", - "V1FileDataFormat", "V1GetAuthTokenResponse", "V1GetFileScanStatusResponse", "V1GetQueryResponse", - "V1ImageOptions", "V1InsertRecordResponse", - "V1Locations", "V1MemberType", - "V1PdfConfig", - "V1PdfOptions", - "V1ProcessedFileOutput", "V1RecordMetaProperties", - "V1ResponseEntities", "V1TokenizeRecordRequest", "V1TokenizeRecordResponse", "V1TokenizeResponse", @@ -274,8 +242,8 @@ "audit", "authentication", "bin_lookup", - "deprecated", "files", + "guardrails", "query", "records", "strings", diff --git a/skyflow/generated/rest/client.py b/skyflow/generated/rest/client.py index 315d1f86..e111c0b2 100644 --- a/skyflow/generated/rest/client.py +++ b/skyflow/generated/rest/client.py @@ -7,9 +7,9 @@ from .authentication.client import AsyncAuthenticationClient, AuthenticationClient from 
.bin_lookup.client import AsyncBinLookupClient, BinLookupClient from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .deprecated.client import AsyncDeprecatedClient, DeprecatedClient from .environment import SkyflowEnvironment from .files.client import AsyncFilesClient, FilesClient +from .guardrails.client import AsyncGuardrailsClient, GuardrailsClient from .query.client import AsyncQueryClient, QueryClient from .records.client import AsyncRecordsClient, RecordsClient from .strings.client import AsyncStringsClient, StringsClient @@ -35,6 +35,9 @@ class Skyflow: token : typing.Union[str, typing.Callable[[], str]] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. @@ -59,6 +62,7 @@ def __init__( base_url: typing.Optional[str] = None, environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, token: typing.Union[str, typing.Callable[[], str]], + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None, @@ -69,6 +73,7 @@ def __init__( self._client_wrapper = SyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) @@ -82,7 +87,7 @@ def __init__( self.tokens = TokensClient(client_wrapper=self._client_wrapper) self.query = QueryClient(client_wrapper=self._client_wrapper) self.authentication = AuthenticationClient(client_wrapper=self._client_wrapper) - self.deprecated = DeprecatedClient(client_wrapper=self._client_wrapper) + self.guardrails = GuardrailsClient(client_wrapper=self._client_wrapper) self.strings = StringsClient(client_wrapper=self._client_wrapper) self.files = FilesClient(client_wrapper=self._client_wrapper) @@ -106,6 +111,9 @@ class AsyncSkyflow: token : typing.Union[str, typing.Callable[[], str]] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. 
@@ -130,6 +138,7 @@ def __init__( base_url: typing.Optional[str] = None, environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, token: typing.Union[str, typing.Callable[[], str]], + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None, @@ -140,6 +149,7 @@ def __init__( self._client_wrapper = AsyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) @@ -153,7 +163,7 @@ def __init__( self.tokens = AsyncTokensClient(client_wrapper=self._client_wrapper) self.query = AsyncQueryClient(client_wrapper=self._client_wrapper) self.authentication = AsyncAuthenticationClient(client_wrapper=self._client_wrapper) - self.deprecated = AsyncDeprecatedClient(client_wrapper=self._client_wrapper) + self.guardrails = AsyncGuardrailsClient(client_wrapper=self._client_wrapper) self.strings = AsyncStringsClient(client_wrapper=self._client_wrapper) self.files = AsyncFilesClient(client_wrapper=self._client_wrapper) diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index aa31aea3..a3210a7e 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -11,18 +11,21 @@ def __init__( self, *, token: typing.Union[str, typing.Callable[[], str]], + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, ): self._token = token + self._headers = headers self._base_url = base_url self._timeout = timeout def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", - "X-Fern-SDK-Name": "skyflow.generated.rest", - "X-Fern-SDK-Version": "0.0.209", + "X-Fern-SDK-Name": "skyflow_vault", + "X-Fern-SDK-Version": "0.0.252", + **(self.get_custom_headers() or {}), } headers["Authorization"] = f"Bearer {self._get_token()}" return headers @@ -33,6 +36,9 @@ def _get_token(self) -> str: else: return self._token() + def get_custom_headers(self) -> typing.Optional[typing.Dict[str, str]]: + return self._headers + def get_base_url(self) -> str: return self._base_url @@ -45,11 +51,12 @@ def __init__( self, *, token: typing.Union[str, typing.Callable[[], str]], + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.Client, ): - super().__init__(token=token, base_url=base_url, timeout=timeout) + super().__init__(token=token, headers=headers, base_url=base_url, timeout=timeout) self.httpx_client = HttpClient( httpx_client=httpx_client, base_headers=self.get_headers, @@ -63,11 +70,12 @@ def __init__( self, *, token: typing.Union[str, typing.Callable[[], str]], + headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient, ): - super().__init__(token=token, base_url=base_url, timeout=timeout) + super().__init__(token=token, headers=headers, base_url=base_url, timeout=timeout) self.httpx_client = AsyncHttpClient( httpx_client=httpx_client, base_headers=self.get_headers, diff --git a/skyflow/generated/rest/core/pydantic_utilities.py b/skyflow/generated/rest/core/pydantic_utilities.py index 0360ef49..7db29500 100644 --- 
a/skyflow/generated/rest/core/pydantic_utilities.py +++ b/skyflow/generated/rest/core/pydantic_utilities.py @@ -59,9 +59,9 @@ class UniversalBaseModel(pydantic.BaseModel): protected_namespaces=(), ) - @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore[attr-defined] - def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> Any: # type: ignore[name-defined] - serialized = handler(self) + @pydantic.model_serializer(mode="plain", when_used="json") # type: ignore[attr-defined] + def serialize_model(self) -> Any: # type: ignore[name-defined] + serialized = self.model_dump() data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()} return data diff --git a/skyflow/generated/rest/deprecated/client.py b/skyflow/generated/rest/deprecated/client.py deleted file mode 100644 index bd1cc88c..00000000 --- a/skyflow/generated/rest/deprecated/client.py +++ /dev/null @@ -1,598 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.request_options import RequestOptions -from ..types.detect_data_accuracy import DetectDataAccuracy -from ..types.detect_data_entities import DetectDataEntities -from ..types.detect_file_request_data_type import DetectFileRequestDataType -from ..types.detect_request_deidentify_option import DetectRequestDeidentifyOption -from ..types.v_1_advanced_options import V1AdvancedOptions -from ..types.v_1_audio_config import V1AudioConfig -from ..types.v_1_detect_file_response import V1DetectFileResponse -from ..types.v_1_detect_status_response import V1DetectStatusResponse -from ..types.v_1_detect_text_response import V1DetectTextResponse -from ..types.v_1_file_data_format import V1FileDataFormat -from ..types.v_1_image_options import V1ImageOptions -from ..types.v_1_pdf_config import V1PdfConfig -from .raw_client import AsyncRawDeprecatedClient, RawDeprecatedClient - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class DeprecatedClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawDeprecatedClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawDeprecatedClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawDeprecatedClient - """ - return self._raw_client - - def detect_service_detect_file_input( - self, - *, - file: str, - data_format: V1FileDataFormat, - input_type: DetectFileRequestDataType, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - audio: typing.Optional[V1AudioConfig] = OMIT, - image: typing.Optional[V1ImageOptions] = OMIT, - pdf: typing.Optional[V1PdfConfig] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> V1DetectFileResponse: - """ - Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. - - Parameters - ---------- - file : str - Path of the file or base64-encoded data that has to be processed. - - data_format : V1FileDataFormat - - input_type : DetectFileRequestDataType - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - audio : typing.Optional[V1AudioConfig] - - image : typing.Optional[V1ImageOptions] - - pdf : typing.Optional[V1PdfConfig] - - advanced_options : typing.Optional[V1AdvancedOptions] - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectFileResponse - A successful response. - - Examples - -------- - from skyflow import Skyflow, V1AudioConfig - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.deprecated.detect_service_detect_file_input( - file="fkdjfhdlnnggtsjj...", - data_format="mp3", - input_type="BASE64", - vault_id="a372f752689c9bfc8ca3d4dba", - restrict_entity_types=[ - "name", - "age", - "location", - "ssn", - "bank_account", - "credit_card", - "credit_card_expiration", - "cvv", - "date", - "date_interval", - "dob", - "driver_license", - "email_address", - "healthcare_number", - "numerical_pii", - "phone_number", - "medical_code", - "account_number", - "gender_sexuality", - "name_medical_professional", - "occupation", - "organization", - "organization_medical_facility", - ], - return_entities=True, - accuracy="high_multilingual", - audio=V1AudioConfig( - output_transcription="none", - output_processed_audio=False, - ), - ) - """ - _response = self._raw_client.detect_service_detect_file_input( - file=file, - data_format=data_format, - input_type=input_type, - vault_id=vault_id, - session_id=session_id, - restrict_entity_types=restrict_entity_types, - allow_regex=allow_regex, - restrict_regex=restrict_regex, - return_entities=return_entities, - accuracy=accuracy, - audio=audio, - image=image, - pdf=pdf, - advanced_options=advanced_options, - deidentify_token_format=deidentify_token_format, - request_options=request_options, - ) - return _response.data - - def detect_service_detect_status( - self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> V1DetectStatusResponse: - """ - Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. - - Parameters - ---------- - id : str - ID of the deidentification request. - - vault_id : typing.Optional[str] - ID of the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectStatusResponse - A successful response. - - Examples - -------- - from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.deprecated.detect_service_detect_status( - id="ID", - ) - """ - _response = self._raw_client.detect_service_detect_status( - id, vault_id=vault_id, request_options=request_options - ) - return _response.data - - def detect_service_detect_text( - self, - *, - text: str, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - store_entities: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> V1DetectTextResponse: - """ - Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. - - Parameters - ---------- - text : str - Data to deidentify. - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - advanced_options : typing.Optional[V1AdvancedOptions] - - store_entities : typing.Optional[bool] - Indicates whether entities should be stored in the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectTextResponse - A successful response. - - Examples - -------- - from skyflow import Skyflow - - client = Skyflow( - token="YOUR_TOKEN", - ) - client.deprecated.detect_service_detect_text( - text="text", - vault_id="c848741aefb74bf38780da5399a76507", - ) - """ - _response = self._raw_client.detect_service_detect_text( - text=text, - vault_id=vault_id, - session_id=session_id, - restrict_entity_types=restrict_entity_types, - deidentify_token_format=deidentify_token_format, - allow_regex=allow_regex, - restrict_regex=restrict_regex, - return_entities=return_entities, - accuracy=accuracy, - advanced_options=advanced_options, - store_entities=store_entities, - request_options=request_options, - ) - return _response.data - - -class AsyncDeprecatedClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawDeprecatedClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawDeprecatedClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawDeprecatedClient - """ - return self._raw_client - - async def detect_service_detect_file_input( - self, - *, - file: str, - data_format: V1FileDataFormat, - input_type: DetectFileRequestDataType, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - audio: typing.Optional[V1AudioConfig] = OMIT, - image: typing.Optional[V1ImageOptions] = OMIT, - pdf: typing.Optional[V1PdfConfig] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> V1DetectFileResponse: - """ - Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. - - Parameters - ---------- - file : str - Path of the file or base64-encoded data that has to be processed. - - data_format : V1FileDataFormat - - input_type : DetectFileRequestDataType - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - audio : typing.Optional[V1AudioConfig] - - image : typing.Optional[V1ImageOptions] - - pdf : typing.Optional[V1PdfConfig] - - advanced_options : typing.Optional[V1AdvancedOptions] - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectFileResponse - A successful response. - - Examples - -------- - import asyncio - - from skyflow import AsyncSkyflow, V1AudioConfig - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - - async def main() -> None: - await client.deprecated.detect_service_detect_file_input( - file="fkdjfhdlnnggtsjj...", - data_format="mp3", - input_type="BASE64", - vault_id="a372f752689c9bfc8ca3d4dba", - restrict_entity_types=[ - "name", - "age", - "location", - "ssn", - "bank_account", - "credit_card", - "credit_card_expiration", - "cvv", - "date", - "date_interval", - "dob", - "driver_license", - "email_address", - "healthcare_number", - "numerical_pii", - "phone_number", - "medical_code", - "account_number", - "gender_sexuality", - "name_medical_professional", - "occupation", - "organization", - "organization_medical_facility", - ], - return_entities=True, - accuracy="high_multilingual", - audio=V1AudioConfig( - output_transcription="none", - output_processed_audio=False, - ), - ) - - - asyncio.run(main()) - """ - _response = await self._raw_client.detect_service_detect_file_input( - file=file, - data_format=data_format, - input_type=input_type, - vault_id=vault_id, - session_id=session_id, - restrict_entity_types=restrict_entity_types, - allow_regex=allow_regex, - restrict_regex=restrict_regex, - return_entities=return_entities, - accuracy=accuracy, - audio=audio, - image=image, - pdf=pdf, - advanced_options=advanced_options, - deidentify_token_format=deidentify_token_format, - request_options=request_options, - ) - return _response.data - - async def detect_service_detect_status( - self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> V1DetectStatusResponse: - """ - Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. - - Parameters - ---------- - id : str - ID of the deidentification request. - - vault_id : typing.Optional[str] - ID of the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectStatusResponse - A successful response. - - Examples - -------- - import asyncio - - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - - async def main() -> None: - await client.deprecated.detect_service_detect_status( - id="ID", - ) - - - asyncio.run(main()) - """ - _response = await self._raw_client.detect_service_detect_status( - id, vault_id=vault_id, request_options=request_options - ) - return _response.data - - async def detect_service_detect_text( - self, - *, - text: str, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - store_entities: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> V1DetectTextResponse: - """ - Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. - - Parameters - ---------- - text : str - Data to deidentify. - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - advanced_options : typing.Optional[V1AdvancedOptions] - - store_entities : typing.Optional[bool] - Indicates whether entities should be stored in the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - V1DetectTextResponse - A successful response. - - Examples - -------- - import asyncio - - from skyflow import AsyncSkyflow - - client = AsyncSkyflow( - token="YOUR_TOKEN", - ) - - - async def main() -> None: - await client.deprecated.detect_service_detect_text( - text="text", - vault_id="c848741aefb74bf38780da5399a76507", - ) - - - asyncio.run(main()) - """ - _response = await self._raw_client.detect_service_detect_text( - text=text, - vault_id=vault_id, - session_id=session_id, - restrict_entity_types=restrict_entity_types, - deidentify_token_format=deidentify_token_format, - allow_regex=allow_regex, - restrict_regex=restrict_regex, - return_entities=return_entities, - accuracy=accuracy, - advanced_options=advanced_options, - store_entities=store_entities, - request_options=request_options, - ) - return _response.data diff --git a/skyflow/generated/rest/deprecated/raw_client.py b/skyflow/generated/rest/deprecated/raw_client.py deleted file mode 100644 index 11dd7ef1..00000000 --- a/skyflow/generated/rest/deprecated/raw_client.py +++ /dev/null @@ -1,624 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from ..errors.not_found_error import NotFoundError -from ..types.detect_data_accuracy import DetectDataAccuracy -from ..types.detect_data_entities import DetectDataEntities -from ..types.detect_file_request_data_type import DetectFileRequestDataType -from ..types.detect_request_deidentify_option import DetectRequestDeidentifyOption -from ..types.v_1_advanced_options import V1AdvancedOptions -from ..types.v_1_audio_config import V1AudioConfig -from ..types.v_1_detect_file_response import V1DetectFileResponse -from ..types.v_1_detect_status_response import V1DetectStatusResponse -from ..types.v_1_detect_text_response import V1DetectTextResponse -from ..types.v_1_file_data_format import V1FileDataFormat -from ..types.v_1_image_options import V1ImageOptions -from ..types.v_1_pdf_config import V1PdfConfig - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawDeprecatedClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def detect_service_detect_file_input( - self, - *, - file: str, - data_format: V1FileDataFormat, - input_type: DetectFileRequestDataType, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - audio: typing.Optional[V1AudioConfig] = OMIT, - image: typing.Optional[V1ImageOptions] = OMIT, - pdf: typing.Optional[V1PdfConfig] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[V1DetectFileResponse]: - """ - Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. - - Parameters - ---------- - file : str - Path of the file or base64-encoded data that has to be processed. - - data_format : V1FileDataFormat - - input_type : DetectFileRequestDataType - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - audio : typing.Optional[V1AudioConfig] - - image : typing.Optional[V1ImageOptions] - - pdf : typing.Optional[V1PdfConfig] - - advanced_options : typing.Optional[V1AdvancedOptions] - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[V1DetectFileResponse] - A successful response. - """ - _response = self._client_wrapper.httpx_client.request( - "v1/detect/file", - method="POST", - json={ - "file": file, - "data_format": data_format, - "input_type": input_type, - "vault_id": vault_id, - "session_id": session_id, - "restrict_entity_types": restrict_entity_types, - "allow_regex": allow_regex, - "restrict_regex": restrict_regex, - "return_entities": return_entities, - "accuracy": accuracy, - "audio": convert_and_respect_annotation_metadata( - object_=audio, annotation=V1AudioConfig, direction="write" - ), - "image": convert_and_respect_annotation_metadata( - object_=image, annotation=V1ImageOptions, direction="write" - ), - "pdf": convert_and_respect_annotation_metadata(object_=pdf, annotation=V1PdfConfig, direction="write"), - "advanced_options": convert_and_respect_annotation_metadata( - object_=advanced_options, annotation=V1AdvancedOptions, direction="write" - ), - "deidentify_token_format": deidentify_token_format, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectFileResponse, - parse_obj_as( - type_=V1DetectFileResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def detect_service_detect_status( - self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[V1DetectStatusResponse]: - """ - Note: This operation is deprecated. Use Get Detect Run.

          Returns the status of a file deidentification request. - - Parameters - ---------- - id : str - ID of the deidentification request. - - vault_id : typing.Optional[str] - ID of the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[V1DetectStatusResponse] - A successful response. - """ - _response = self._client_wrapper.httpx_client.request( - f"v1/detect/status/{jsonable_encoder(id)}", - method="GET", - params={ - "vault_id": vault_id, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectStatusResponse, - parse_obj_as( - type_=V1DetectStatusResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def detect_service_detect_text( - self, - *, - text: str, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - store_entities: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[V1DetectTextResponse]: - """ - Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. - - Parameters - ---------- - text : str - Data to deidentify. - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - advanced_options : typing.Optional[V1AdvancedOptions] - - store_entities : typing.Optional[bool] - Indicates whether entities should be stored in the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[V1DetectTextResponse] - A successful response. - """ - _response = self._client_wrapper.httpx_client.request( - "v1/detect/text", - method="POST", - json={ - "text": text, - "vault_id": vault_id, - "session_id": session_id, - "restrict_entity_types": restrict_entity_types, - "deidentify_token_format": deidentify_token_format, - "allow_regex": allow_regex, - "restrict_regex": restrict_regex, - "return_entities": return_entities, - "accuracy": accuracy, - "advanced_options": convert_and_respect_annotation_metadata( - object_=advanced_options, annotation=V1AdvancedOptions, direction="write" - ), - "store_entities": store_entities, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectTextResponse, - parse_obj_as( - type_=V1DetectTextResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawDeprecatedClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def detect_service_detect_file_input( - self, - *, - file: str, - data_format: V1FileDataFormat, - input_type: DetectFileRequestDataType, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - audio: typing.Optional[V1AudioConfig] = OMIT, - image: typing.Optional[V1ImageOptions] = OMIT, - pdf: typing.Optional[V1PdfConfig] = OMIT, - advanced_options: 
typing.Optional[V1AdvancedOptions] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[V1DetectFileResponse]: - """ - Note: This operation is deprecated. Use one of the De-identify File operations.

          Detects and deidentifies sensitive data from image, audio, and video files. - - Parameters - ---------- - file : str - Path of the file or base64-encoded data that has to be processed. - - data_format : V1FileDataFormat - - input_type : DetectFileRequestDataType - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - audio : typing.Optional[V1AudioConfig] - - image : typing.Optional[V1ImageOptions] - - pdf : typing.Optional[V1PdfConfig] - - advanced_options : typing.Optional[V1AdvancedOptions] - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[V1DetectFileResponse] - A successful response. - """ - _response = await self._client_wrapper.httpx_client.request( - "v1/detect/file", - method="POST", - json={ - "file": file, - "data_format": data_format, - "input_type": input_type, - "vault_id": vault_id, - "session_id": session_id, - "restrict_entity_types": restrict_entity_types, - "allow_regex": allow_regex, - "restrict_regex": restrict_regex, - "return_entities": return_entities, - "accuracy": accuracy, - "audio": convert_and_respect_annotation_metadata( - object_=audio, annotation=V1AudioConfig, direction="write" - ), - "image": convert_and_respect_annotation_metadata( - object_=image, annotation=V1ImageOptions, direction="write" - ), - "pdf": convert_and_respect_annotation_metadata(object_=pdf, annotation=V1PdfConfig, direction="write"), - "advanced_options": convert_and_respect_annotation_metadata( - object_=advanced_options, annotation=V1AdvancedOptions, direction="write" - ), - "deidentify_token_format": deidentify_token_format, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectFileResponse, - parse_obj_as( - type_=V1DetectFileResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def detect_service_detect_status( - self, id: str, *, vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[V1DetectStatusResponse]: - """ - Note: This operation is deprecated. 
Use Get Detect Run.

          Returns the status of a file deidentification request. - - Parameters - ---------- - id : str - ID of the deidentification request. - - vault_id : typing.Optional[str] - ID of the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[V1DetectStatusResponse] - A successful response. - """ - _response = await self._client_wrapper.httpx_client.request( - f"v1/detect/status/{jsonable_encoder(id)}", - method="GET", - params={ - "vault_id": vault_id, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectStatusResponse, - parse_obj_as( - type_=V1DetectStatusResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def detect_service_detect_text( - self, - *, - text: str, - vault_id: str, - session_id: typing.Optional[str] = OMIT, - restrict_entity_types: typing.Optional[typing.Sequence[DetectDataEntities]] = OMIT, - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = OMIT, - allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, - restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, - return_entities: typing.Optional[bool] = OMIT, - accuracy: typing.Optional[DetectDataAccuracy] = OMIT, - advanced_options: typing.Optional[V1AdvancedOptions] = OMIT, - store_entities: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[V1DetectTextResponse]: - """ - Note: This operation is deprecated. Use De-identify String.

          Detects and deidentifies sensitive data from text. - - Parameters - ---------- - text : str - Data to deidentify. - - vault_id : str - ID of the vault. - - session_id : typing.Optional[str] - Will give a handle to delete the tokens generated during a specific interaction. - - restrict_entity_types : typing.Optional[typing.Sequence[DetectDataEntities]] - Entities to detect and deidentify. - - deidentify_token_format : typing.Optional[DetectRequestDeidentifyOption] - - allow_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to ignore when detecting entities. - - restrict_regex : typing.Optional[typing.Sequence[str]] - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - - return_entities : typing.Optional[bool] - If `true`, returns the details for the detected entities. - - accuracy : typing.Optional[DetectDataAccuracy] - - advanced_options : typing.Optional[V1AdvancedOptions] - - store_entities : typing.Optional[bool] - Indicates whether entities should be stored in the vault. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[V1DetectTextResponse] - A successful response. - """ - _response = await self._client_wrapper.httpx_client.request( - "v1/detect/text", - method="POST", - json={ - "text": text, - "vault_id": vault_id, - "session_id": session_id, - "restrict_entity_types": restrict_entity_types, - "deidentify_token_format": deidentify_token_format, - "allow_regex": allow_regex, - "restrict_regex": restrict_regex, - "return_entities": return_entities, - "accuracy": accuracy, - "advanced_options": convert_and_respect_annotation_metadata( - object_=advanced_options, annotation=V1AdvancedOptions, direction="write" - ), - "store_entities": store_entities, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - V1DetectTextResponse, - parse_obj_as( - type_=V1DetectTextResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/files/__init__.py b/skyflow/generated/rest/files/__init__.py index 1b10a15a..b1679867 100644 --- a/skyflow/generated/rest/files/__init__.py +++ b/skyflow/generated/rest/files/__init__.py @@ -21,6 +21,9 @@ DeidentifyStructuredTextRequestFile, DeidentifyStructuredTextRequestFileDataFormat, DeidentifyTextRequestFile, + ReidentifyFileRequestFile, + ReidentifyFileRequestFileDataFormat, + ReidentifyFileRequestFormat, ) __all__ = [ @@ -42,4 +45,7 @@ "DeidentifyStructuredTextRequestFile", "DeidentifyStructuredTextRequestFileDataFormat", "DeidentifyTextRequestFile", + "ReidentifyFileRequestFile", + "ReidentifyFileRequestFileDataFormat", + "ReidentifyFileRequestFormat", ] diff --git a/skyflow/generated/rest/files/client.py b/skyflow/generated/rest/files/client.py index 
913ccd59..654789de 100644 --- a/skyflow/generated/rest/files/client.py +++ b/skyflow/generated/rest/files/client.py @@ -5,9 +5,11 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.allow_regex import AllowRegex +from ..types.configuration_id import ConfigurationId from ..types.deidentify_file_response import DeidentifyFileResponse from ..types.deidentify_status_response import DeidentifyStatusResponse from ..types.entity_types import EntityTypes +from ..types.reidentify_file_response import ReidentifyFileResponse from ..types.resource_id import ResourceId from ..types.restrict_regex import RestrictRegex from ..types.token_type_without_vault import TokenTypeWithoutVault @@ -26,6 +28,8 @@ from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile from .types.deidentify_text_request_file import DeidentifyTextRequestFile +from .types.reidentify_file_request_file import ReidentifyFileRequestFile +from .types.reidentify_file_request_format import ReidentifyFileRequestFormat # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -51,6 +55,7 @@ def deidentify_file( *, vault_id: VaultId, file: DeidentifyFileRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -68,6 +73,8 @@ def deidentify_file( file : DeidentifyFileRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -105,6 +112,7 @@ def deidentify_file( _response = self._raw_client.deidentify_file( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -119,6 +127,7 @@ def deidentify_document( *, vault_id: VaultId, file: DeidentifyDocumentRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -136,6 +145,8 @@ def deidentify_document( file : DeidentifyDocumentRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -173,6 +184,7 @@ def deidentify_document( _response = self._raw_client.deidentify_document( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -187,6 +199,7 @@ def deidentify_pdf( *, vault_id: VaultId, file: DeidentifyPdfRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, density: typing.Optional[int] = OMIT, max_resolution: typing.Optional[int] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, @@ -206,6 +219,8 @@ def deidentify_pdf( file : DeidentifyPdfRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + density : typing.Optional[int] Pixel density at which to process the PDF file. 
@@ -248,6 +263,7 @@ def deidentify_pdf( _response = self._raw_client.deidentify_pdf( vault_id=vault_id, file=file, + configuration_id=configuration_id, density=density, max_resolution=max_resolution, entity_types=entity_types, @@ -264,6 +280,7 @@ def deidentify_image( *, vault_id: VaultId, file: DeidentifyImageRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_image: typing.Optional[bool] = OMIT, output_ocr_text: typing.Optional[bool] = OMIT, masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, @@ -284,6 +301,8 @@ def deidentify_image( file : DeidentifyImageRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] If `true`, includes processed image in the output. @@ -330,6 +349,7 @@ def deidentify_image( _response = self._raw_client.deidentify_image( vault_id=vault_id, file=file, + configuration_id=configuration_id, output_processed_image=output_processed_image, output_ocr_text=output_ocr_text, masking_method=masking_method, @@ -347,6 +367,7 @@ def deidentify_text( *, vault_id: VaultId, file: DeidentifyTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -364,6 +385,8 @@ def deidentify_text( file : DeidentifyTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -400,6 +423,7 @@ def deidentify_text( _response = self._raw_client.deidentify_text( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -414,6 +438,7 @@ def deidentify_structured_text( *, vault_id: VaultId, file: DeidentifyStructuredTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -431,6 +456,8 @@ def deidentify_structured_text( file : DeidentifyStructuredTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -468,6 +495,7 @@ def deidentify_structured_text( _response = self._raw_client.deidentify_structured_text( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -482,6 +510,7 @@ def deidentify_spreadsheet( *, vault_id: VaultId, file: DeidentifySpreadsheetRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -499,6 +528,8 @@ def deidentify_spreadsheet( file : DeidentifySpreadsheetRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -536,6 +567,7 @@ def deidentify_spreadsheet( _response = self._raw_client.deidentify_spreadsheet( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -550,6 +582,7 @@ def deidentify_presentation( *, vault_id: VaultId, file: DeidentifyPresentationRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -567,6 +600,8 @@ def deidentify_presentation( file : DeidentifyPresentationRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -604,6 +639,7 @@ def deidentify_presentation( _response = self._raw_client.deidentify_presentation( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -618,6 +654,7 @@ def deidentify_audio( *, vault_id: VaultId, file: DeidentifyAudioRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_audio: typing.Optional[bool] = OMIT, output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, bleep_gain: typing.Optional[float] = OMIT, @@ -641,6 +678,8 @@ def deidentify_audio( file : DeidentifyAudioRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_audio : typing.Optional[bool] If `true`, includes processed audio file in the response. @@ -696,6 +735,7 @@ def deidentify_audio( _response = self._raw_client.deidentify_audio( vault_id=vault_id, file=file, + configuration_id=configuration_id, output_processed_audio=output_processed_audio, output_transcription=output_transcription, bleep_gain=bleep_gain, @@ -748,6 +788,56 @@ def get_run( _response = self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) return _response.data + def reidentify_file( + self, + *, + vault_id: VaultId, + file: ReidentifyFileRequestFile, + format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyFileResponse: + """ + Re-identifies tokens in a file. + + Parameters + ---------- + vault_id : VaultId + + file : ReidentifyFileRequestFile + File to re-identify. Files are specified as Base64-encoded data or an EFS path. + + format : typing.Optional[ReidentifyFileRequestFormat] + Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ReidentifyFileResponse + A successful response. 
+ + Examples + -------- + from skyflow import Skyflow + from skyflow.files import ReidentifyFileRequestFile + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.files.reidentify_file( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=ReidentifyFileRequestFile( + base_64="Zm9vYmFy", + data_format="txt", + ), + ) + """ + _response = self._raw_client.reidentify_file( + vault_id=vault_id, file=file, format=format, request_options=request_options + ) + return _response.data + class AsyncFilesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -769,6 +859,7 @@ async def deidentify_file( *, vault_id: VaultId, file: DeidentifyFileRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -786,6 +877,8 @@ async def deidentify_file( file : DeidentifyFileRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -831,6 +924,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_file( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -845,6 +939,7 @@ async def deidentify_document( *, vault_id: VaultId, file: DeidentifyDocumentRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -862,6 +957,8 @@ async def deidentify_document( file : DeidentifyDocumentRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -907,6 +1004,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_document( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -921,6 +1019,7 @@ async def deidentify_pdf( *, vault_id: VaultId, file: DeidentifyPdfRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, density: typing.Optional[int] = OMIT, max_resolution: typing.Optional[int] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, @@ -940,6 +1039,8 @@ async def deidentify_pdf( file : DeidentifyPdfRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + density : typing.Optional[int] Pixel density at which to process the PDF file. 
@@ -990,6 +1091,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_pdf( vault_id=vault_id, file=file, + configuration_id=configuration_id, density=density, max_resolution=max_resolution, entity_types=entity_types, @@ -1006,6 +1108,7 @@ async def deidentify_image( *, vault_id: VaultId, file: DeidentifyImageRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_image: typing.Optional[bool] = OMIT, output_ocr_text: typing.Optional[bool] = OMIT, masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, @@ -1026,6 +1129,8 @@ async def deidentify_image( file : DeidentifyImageRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] If `true`, includes processed image in the output. @@ -1080,6 +1185,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_image( vault_id=vault_id, file=file, + configuration_id=configuration_id, output_processed_image=output_processed_image, output_ocr_text=output_ocr_text, masking_method=masking_method, @@ -1097,6 +1203,7 @@ async def deidentify_text( *, vault_id: VaultId, file: DeidentifyTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1114,6 +1221,8 @@ async def deidentify_text( file : DeidentifyTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1158,6 +1267,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_text( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -1172,6 +1282,7 @@ async def deidentify_structured_text( *, vault_id: VaultId, file: DeidentifyStructuredTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1189,6 +1300,8 @@ async def deidentify_structured_text( file : DeidentifyStructuredTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1234,6 +1347,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_structured_text( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -1248,6 +1362,7 @@ async def deidentify_spreadsheet( *, vault_id: VaultId, file: DeidentifySpreadsheetRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1265,6 +1380,8 @@ async def deidentify_spreadsheet( file : DeidentifySpreadsheetRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1310,6 +1427,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_spreadsheet( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -1324,6 +1442,7 @@ async def deidentify_presentation( *, vault_id: VaultId, file: DeidentifyPresentationRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1341,6 +1460,8 @@ async def deidentify_presentation( file : DeidentifyPresentationRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1386,6 +1507,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_presentation( vault_id=vault_id, file=file, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -1400,6 +1522,7 @@ async def deidentify_audio( *, vault_id: VaultId, file: DeidentifyAudioRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_audio: typing.Optional[bool] = OMIT, output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, bleep_gain: typing.Optional[float] = OMIT, @@ -1423,6 +1546,8 @@ async def deidentify_audio( file : DeidentifyAudioRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_audio : typing.Optional[bool] If `true`, includes processed audio file in the response. @@ -1486,6 +1611,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_audio( vault_id=vault_id, file=file, + configuration_id=configuration_id, output_processed_audio=output_processed_audio, output_transcription=output_transcription, bleep_gain=bleep_gain, @@ -1545,3 +1671,61 @@ async def main() -> None: """ _response = await self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) return _response.data + + async def reidentify_file( + self, + *, + vault_id: VaultId, + file: ReidentifyFileRequestFile, + format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyFileResponse: + """ + Re-identifies tokens in a file. + + Parameters + ---------- + vault_id : VaultId + + file : ReidentifyFileRequestFile + File to re-identify. Files are specified as Base64-encoded data or an EFS path. + + format : typing.Optional[ReidentifyFileRequestFormat] + Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ReidentifyFileResponse + A successful response. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + from skyflow.files import ReidentifyFileRequestFile + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.files.reidentify_file( + vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", + file=ReidentifyFileRequestFile( + base_64="Zm9vYmFy", + data_format="txt", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.reidentify_file( + vault_id=vault_id, file=file, format=format, request_options=request_options + ) + return _response.data diff --git a/skyflow/generated/rest/files/raw_client.py b/skyflow/generated/rest/files/raw_client.py index b3193544..5a67292f 100644 --- a/skyflow/generated/rest/files/raw_client.py +++ b/skyflow/generated/rest/files/raw_client.py @@ -15,10 +15,12 @@ from ..errors.not_found_error import NotFoundError from ..errors.unauthorized_error import UnauthorizedError from ..types.allow_regex import AllowRegex +from ..types.configuration_id import ConfigurationId from ..types.deidentify_file_response import DeidentifyFileResponse from ..types.deidentify_status_response import DeidentifyStatusResponse from ..types.entity_types import EntityTypes from ..types.error_response import ErrorResponse +from ..types.reidentify_file_response import ReidentifyFileResponse from ..types.resource_id import ResourceId from ..types.restrict_regex import RestrictRegex from ..types.token_type_without_vault import TokenTypeWithoutVault @@ -36,6 +38,8 @@ from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile from .types.deidentify_text_request_file import DeidentifyTextRequestFile +from .types.reidentify_file_request_file import ReidentifyFileRequestFile +from .types.reidentify_file_request_format import ReidentifyFileRequestFormat # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -50,6 +54,7 @@ def deidentify_file( *, vault_id: VaultId, file: DeidentifyFileRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -67,6 +72,8 @@ def deidentify_file( file : DeidentifyFileRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -93,6 +100,7 @@ def deidentify_file( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyFileRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -162,6 +170,7 @@ def deidentify_document( *, vault_id: VaultId, file: DeidentifyDocumentRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -179,6 +188,8 @@ def deidentify_document( file : DeidentifyDocumentRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -205,6 +216,7 @@ def deidentify_document( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -274,6 +286,7 @@ def deidentify_pdf( *, vault_id: VaultId, file: DeidentifyPdfRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, density: typing.Optional[int] = OMIT, max_resolution: typing.Optional[int] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, @@ -293,6 +306,8 @@ def deidentify_pdf( file : DeidentifyPdfRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + density : typing.Optional[int] Pixel density at which to process the PDF file. @@ -325,6 +340,7 @@ def deidentify_pdf( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyPdfRequestFile, direction="write" ), + "configuration_id": configuration_id, "density": density, "max_resolution": max_resolution, "entity_types": entity_types, @@ -396,6 +412,7 @@ def deidentify_image( *, vault_id: VaultId, file: DeidentifyImageRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_image: typing.Optional[bool] = OMIT, output_ocr_text: typing.Optional[bool] = OMIT, masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, @@ -416,6 +433,8 @@ def deidentify_image( file : DeidentifyImageRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] If `true`, includes processed image in the output. @@ -451,6 +470,7 @@ def deidentify_image( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyImageRequestFile, direction="write" ), + "configuration_id": configuration_id, "output_processed_image": output_processed_image, "output_ocr_text": output_ocr_text, "masking_method": masking_method, @@ -523,6 +543,7 @@ def deidentify_text( *, vault_id: VaultId, file: DeidentifyTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -540,6 +561,8 @@ def deidentify_text( file : DeidentifyTextRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -566,6 +589,7 @@ def deidentify_text( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyTextRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -635,6 +659,7 @@ def deidentify_structured_text( *, vault_id: VaultId, file: DeidentifyStructuredTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -652,6 +677,8 @@ def deidentify_structured_text( file : DeidentifyStructuredTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -678,6 +705,7 @@ def deidentify_structured_text( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -747,6 +775,7 @@ def deidentify_spreadsheet( *, vault_id: VaultId, file: DeidentifySpreadsheetRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -764,6 +793,8 @@ def deidentify_spreadsheet( file : DeidentifySpreadsheetRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -790,6 +821,7 @@ def deidentify_spreadsheet( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -859,6 +891,7 @@ def deidentify_presentation( *, vault_id: VaultId, file: DeidentifyPresentationRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -876,6 +909,8 @@ def deidentify_presentation( file : DeidentifyPresentationRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -902,6 +937,7 @@ def deidentify_presentation( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -971,6 +1007,7 @@ def deidentify_audio( *, vault_id: VaultId, file: DeidentifyAudioRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_audio: typing.Optional[bool] = OMIT, output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, bleep_gain: typing.Optional[float] = OMIT, @@ -994,6 +1031,8 @@ def deidentify_audio( file : DeidentifyAudioRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_audio : typing.Optional[bool] If `true`, includes processed audio file in the response. @@ -1038,6 +1077,7 @@ def deidentify_audio( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyAudioRequestFile, direction="write" ), + "configuration_id": configuration_id, "output_processed_audio": output_processed_audio, "output_transcription": output_transcription, "bleep_gain": bleep_gain, @@ -1197,6 +1237,101 @@ def get_run( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + def reidentify_file( + self, + *, + vault_id: VaultId, + file: ReidentifyFileRequestFile, + format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ReidentifyFileResponse]: + """ + Re-identifies tokens in a file. + + Parameters + ---------- + vault_id : VaultId + + file : ReidentifyFileRequestFile + File to re-identify. Files are specified as Base64-encoded data or an EFS path. + + format : typing.Optional[ReidentifyFileRequestFormat] + Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ReidentifyFileResponse] + A successful response. 
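Beyond the new re-identify endpoint, this change also threads an optional configuration_id through every deidentify_* operation above. A minimal, illustrative sketch follows; the request-file fields are assumed to mirror the ReidentifyFileRequestFile example shown earlier, "csv" is taken from the DeidentifyFileRequestFileDataFormat values in this diff, and the vault ID and configuration ID are placeholders.

    from skyflow import Skyflow
    from skyflow.files import DeidentifyFileRequestFile  # assumption: exported alongside ReidentifyFileRequestFile

    client = Skyflow(token="YOUR_TOKEN")

    # Start a file de-identification run, passing the new optional configuration_id.
    run = client.files.deidentify_file(
        vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b",
        file=DeidentifyFileRequestFile(
            base_64="Zm9vYmFy",   # assumption: fields mirror ReidentifyFileRequestFile
            data_format="csv",
        ),
        configuration_id="YOUR_CONFIGURATION_ID",  # new in this change; omit to use vault defaults
    )
    print(run)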
+ """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/reidentify/file", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=ReidentifyFileRequestFile, direction="write" + ), + "format": convert_and_respect_annotation_metadata( + object_=format, annotation=ReidentifyFileRequestFormat, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ReidentifyFileResponse, + parse_obj_as( + type_=ReidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + class AsyncRawFilesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -1207,6 +1342,7 @@ async def deidentify_file( *, vault_id: VaultId, file: DeidentifyFileRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1224,6 +1360,8 @@ async def deidentify_file( file : DeidentifyFileRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1250,6 +1388,7 @@ async def deidentify_file( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyFileRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -1319,6 +1458,7 @@ async def deidentify_document( *, vault_id: VaultId, file: DeidentifyDocumentRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1336,6 +1476,8 @@ async def deidentify_document( file : DeidentifyDocumentRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1362,6 +1504,7 @@ async def deidentify_document( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -1431,6 +1574,7 @@ async def deidentify_pdf( *, vault_id: VaultId, file: DeidentifyPdfRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, density: typing.Optional[int] = OMIT, max_resolution: typing.Optional[int] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, @@ -1450,6 +1594,8 @@ async def deidentify_pdf( file : DeidentifyPdfRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + density : typing.Optional[int] Pixel density at which to process the PDF file. @@ -1482,6 +1628,7 @@ async def deidentify_pdf( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyPdfRequestFile, direction="write" ), + "configuration_id": configuration_id, "density": density, "max_resolution": max_resolution, "entity_types": entity_types, @@ -1553,6 +1700,7 @@ async def deidentify_image( *, vault_id: VaultId, file: DeidentifyImageRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_image: typing.Optional[bool] = OMIT, output_ocr_text: typing.Optional[bool] = OMIT, masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, @@ -1573,6 +1721,8 @@ async def deidentify_image( file : DeidentifyImageRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] If `true`, includes processed image in the output. @@ -1608,6 +1758,7 @@ async def deidentify_image( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyImageRequestFile, direction="write" ), + "configuration_id": configuration_id, "output_processed_image": output_processed_image, "output_ocr_text": output_ocr_text, "masking_method": masking_method, @@ -1680,6 +1831,7 @@ async def deidentify_text( *, vault_id: VaultId, file: DeidentifyTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1697,6 +1849,8 @@ async def deidentify_text( file : DeidentifyTextRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1723,6 +1877,7 @@ async def deidentify_text( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyTextRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -1792,6 +1947,7 @@ async def deidentify_structured_text( *, vault_id: VaultId, file: DeidentifyStructuredTextRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1809,6 +1965,8 @@ async def deidentify_structured_text( file : DeidentifyStructuredTextRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1835,6 +1993,7 @@ async def deidentify_structured_text( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -1904,6 +2063,7 @@ async def deidentify_spreadsheet( *, vault_id: VaultId, file: DeidentifySpreadsheetRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1921,6 +2081,8 @@ async def deidentify_spreadsheet( file : DeidentifySpreadsheetRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -1947,6 +2109,7 @@ async def deidentify_spreadsheet( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -2016,6 +2179,7 @@ async def deidentify_presentation( *, vault_id: VaultId, file: DeidentifyPresentationRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -2033,6 +2197,8 @@ async def deidentify_presentation( file : DeidentifyPresentationRequestFile File to de-identify. Files are specified as Base64-encoded data. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenTypeWithoutVault] @@ -2059,6 +2225,7 @@ async def deidentify_presentation( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" ), + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenTypeWithoutVault, direction="write" @@ -2128,6 +2295,7 @@ async def deidentify_audio( *, vault_id: VaultId, file: DeidentifyAudioRequestFile, + configuration_id: typing.Optional[ConfigurationId] = OMIT, output_processed_audio: typing.Optional[bool] = OMIT, output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, bleep_gain: typing.Optional[float] = OMIT, @@ -2151,6 +2319,8 @@ async def deidentify_audio( file : DeidentifyAudioRequestFile File to de-identify. Files are specified as Base64-encoded data. + configuration_id : typing.Optional[ConfigurationId] + output_processed_audio : typing.Optional[bool] If `true`, includes processed audio file in the response. @@ -2195,6 +2365,7 @@ async def deidentify_audio( "file": convert_and_respect_annotation_metadata( object_=file, annotation=DeidentifyAudioRequestFile, direction="write" ), + "configuration_id": configuration_id, "output_processed_audio": output_processed_audio, "output_transcription": output_transcription, "bleep_gain": bleep_gain, @@ -2353,3 +2524,98 @@ async def get_run( except JSONDecodeError: raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def reidentify_file( + self, + *, + vault_id: VaultId, + file: ReidentifyFileRequestFile, + format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ReidentifyFileResponse]: + """ + Re-identifies tokens in a file. + + Parameters + ---------- + vault_id : VaultId + + file : ReidentifyFileRequestFile + File to re-identify. Files are specified as Base64-encoded data or an EFS path. + + format : typing.Optional[ReidentifyFileRequestFormat] + Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ReidentifyFileResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/reidentify/file", + method="POST", + json={ + "vault_id": vault_id, + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=ReidentifyFileRequestFile, direction="write" + ), + "format": convert_and_respect_annotation_metadata( + object_=format, annotation=ReidentifyFileRequestFormat, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ReidentifyFileResponse, + parse_obj_as( + type_=ReidentifyFileResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/files/types/__init__.py b/skyflow/generated/rest/files/types/__init__.py index e6343d60..78943a33 100644 --- a/skyflow/generated/rest/files/types/__init__.py +++ b/skyflow/generated/rest/files/types/__init__.py @@ -20,6 +20,9 @@ from .deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile from .deidentify_structured_text_request_file_data_format import DeidentifyStructuredTextRequestFileDataFormat from .deidentify_text_request_file import DeidentifyTextRequestFile +from .reidentify_file_request_file import ReidentifyFileRequestFile +from .reidentify_file_request_file_data_format import ReidentifyFileRequestFileDataFormat +from .reidentify_file_request_format import ReidentifyFileRequestFormat __all__ = [ "DeidentifyAudioRequestFile", @@ -40,4 +43,7 @@ "DeidentifyStructuredTextRequestFile", "DeidentifyStructuredTextRequestFileDataFormat", "DeidentifyTextRequestFile", + "ReidentifyFileRequestFile", + "ReidentifyFileRequestFileDataFormat", + "ReidentifyFileRequestFormat", ] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py index 20581451..f3294014 100644 --- a/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py +++ b/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py @@ -6,6 +6,7 @@ typing.Literal[ "bmp", "csv", + "dcm", "doc", "docx", "jpeg", diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py index bc0c338c..d1ff8c83 100644 --- 
a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py +++ b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py @@ -2,4 +2,4 @@ import typing -DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackbox", "blur"], typing.Any] +DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackout", "blur"], typing.Any] diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_file.py b/skyflow/generated/rest/files/types/reidentify_file_request_file.py new file mode 100644 index 00000000..429f22ee --- /dev/null +++ b/skyflow/generated/rest/files/types/reidentify_file_request_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata +from .reidentify_file_request_file_data_format import ReidentifyFileRequestFileDataFormat + + +class ReidentifyFileRequestFile(UniversalBaseModel): + """ + File to re-identify. Files are specified as Base64-encoded data or an EFS path. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file to re-identify. + """ + + data_format: ReidentifyFileRequestFileDataFormat = pydantic.Field() + """ + Data format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py b/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py new file mode 100644 index 00000000..5aca9bb6 --- /dev/null +++ b/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ReidentifyFileRequestFileDataFormat = typing.Union[ + typing.Literal["csv", "doc", "docx", "json", "txt", "xls", "xlsx", "xml"], typing.Any +] diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_format.py b/skyflow/generated/rest/files/types/reidentify_file_request_format.py new file mode 100644 index 00000000..ec7ca5f1 --- /dev/null +++ b/skyflow/generated/rest/files/types/reidentify_file_request_format.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.entity_type import EntityType + + +class ReidentifyFileRequestFormat(UniversalBaseModel): + """ + Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + """ + + redacted: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to fully redact. + """ + + masked: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to mask. + """ + + plaintext: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + """ + Entity types to return in plaintext. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/deprecated/__init__.py b/skyflow/generated/rest/guardrails/__init__.py similarity index 100% rename from skyflow/generated/rest/deprecated/__init__.py rename to skyflow/generated/rest/guardrails/__init__.py diff --git a/skyflow/generated/rest/guardrails/client.py b/skyflow/generated/rest/guardrails/client.py new file mode 100644 index 00000000..169f7de1 --- /dev/null +++ b/skyflow/generated/rest/guardrails/client.py @@ -0,0 +1,164 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.check_guardrails_response import CheckGuardrailsResponse +from ..types.vault_id import VaultId +from .raw_client import AsyncRawGuardrailsClient, RawGuardrailsClient + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class GuardrailsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._raw_client = RawGuardrailsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawGuardrailsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawGuardrailsClient + """ + return self._raw_client + + def check_guardrails( + self, + *, + vault_id: VaultId, + text: str, + check_toxicity: typing.Optional[bool] = OMIT, + deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> CheckGuardrailsResponse: + """ + Preserve safety and compliance with usage policies. + + Parameters + ---------- + vault_id : VaultId + + text : str + Text to check against guardrails. + + check_toxicity : typing.Optional[bool] + Check for toxicity in the text. + + deny_topics : typing.Optional[typing.Sequence[str]] + List of topics to deny. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CheckGuardrailsResponse + A successful response. + + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.guardrails.check_guardrails( + vault_id="VAULT_ID", + text="I love to play cricket.", + check_toxicity=True, + deny_topics=["sports"], + ) + """ + _response = self._raw_client.check_guardrails( + vault_id=vault_id, + text=text, + check_toxicity=check_toxicity, + deny_topics=deny_topics, + request_options=request_options, + ) + return _response.data + + +class AsyncGuardrailsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._raw_client = AsyncRawGuardrailsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawGuardrailsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawGuardrailsClient + """ + return self._raw_client + + async def check_guardrails( + self, + *, + vault_id: VaultId, + text: str, + check_toxicity: typing.Optional[bool] = OMIT, + deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> CheckGuardrailsResponse: + """ + Preserve safety and compliance with usage policies. + + Parameters + ---------- + vault_id : VaultId + + text : str + Text to check against guardrails. + + check_toxicity : typing.Optional[bool] + Check for toxicity in the text. + + deny_topics : typing.Optional[typing.Sequence[str]] + List of topics to deny. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CheckGuardrailsResponse + A successful response. + + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.guardrails.check_guardrails( + vault_id="VAULT_ID", + text="I love to play cricket.", + check_toxicity=True, + deny_topics=["sports"], + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.check_guardrails( + vault_id=vault_id, + text=text, + check_toxicity=check_toxicity, + deny_topics=deny_topics, + request_options=request_options, + ) + return _response.data diff --git a/skyflow/generated/rest/guardrails/raw_client.py b/skyflow/generated/rest/guardrails/raw_client.py new file mode 100644 index 00000000..11030fd3 --- /dev/null +++ b/skyflow/generated/rest/guardrails/raw_client.py @@ -0,0 +1,221 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.check_guardrails_response import CheckGuardrailsResponse +from ..types.error_response import ErrorResponse +from ..types.vault_id import VaultId + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawGuardrailsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def check_guardrails( + self, + *, + vault_id: VaultId, + text: str, + check_toxicity: typing.Optional[bool] = OMIT, + deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[CheckGuardrailsResponse]: + """ + Preserve safety and compliance with usage policies. + + Parameters + ---------- + vault_id : VaultId + + text : str + Text to check against guardrails. + + check_toxicity : typing.Optional[bool] + Check for toxicity in the text. + + deny_topics : typing.Optional[typing.Sequence[str]] + List of topics to deny. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[CheckGuardrailsResponse] + A successful response. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "v1/detect/guardrails", + method="POST", + json={ + "vault_id": vault_id, + "text": text, + "check_toxicity": check_toxicity, + "deny_topics": deny_topics, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CheckGuardrailsResponse, + parse_obj_as( + type_=CheckGuardrailsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawGuardrailsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def check_guardrails( + self, + *, + vault_id: VaultId, + text: str, + check_toxicity: typing.Optional[bool] = OMIT, + deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[CheckGuardrailsResponse]: + """ + Preserve safety and compliance with usage policies. + + Parameters + ---------- + vault_id : VaultId + + text : str + Text to check against guardrails. + + check_toxicity : typing.Optional[bool] + Check for toxicity in the text. + + deny_topics : typing.Optional[typing.Sequence[str]] + List of topics to deny. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[CheckGuardrailsResponse] + A successful response. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "v1/detect/guardrails", + method="POST", + json={ + "vault_id": vault_id, + "text": text, + "check_toxicity": check_toxicity, + "deny_topics": deny_topics, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + CheckGuardrailsResponse, + parse_obj_as( + type_=CheckGuardrailsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/strings/client.py b/skyflow/generated/rest/strings/client.py index 5c71662d..14b2266d 100644 --- a/skyflow/generated/rest/strings/client.py +++ b/skyflow/generated/rest/strings/client.py @@ -5,6 +5,7 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.allow_regex import AllowRegex +from ..types.configuration_id import ConfigurationId from ..types.deidentify_string_response import DeidentifyStringResponse from ..types.entity_types import EntityTypes from ..types.reidentify_string_response import ReidentifyStringResponse @@ -39,6 +40,7 @@ def deidentify_string( *, vault_id: VaultId, text: str, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenType] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -56,6 +58,8 @@ def deidentify_string( text : str String to de-identify. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenType] @@ -89,6 +93,7 @@ def deidentify_string( _response = self._raw_client.deidentify_string( vault_id=vault_id, text=text, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, @@ -166,6 +171,7 @@ async def deidentify_string( *, vault_id: VaultId, text: str, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenType] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -183,6 +189,8 @@ async def deidentify_string( text : str String to de-identify. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenType] @@ -224,6 +232,7 @@ async def main() -> None: _response = await self._raw_client.deidentify_string( vault_id=vault_id, text=text, + configuration_id=configuration_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, diff --git a/skyflow/generated/rest/strings/raw_client.py b/skyflow/generated/rest/strings/raw_client.py index ad67433a..3ae9bf41 100644 --- a/skyflow/generated/rest/strings/raw_client.py +++ b/skyflow/generated/rest/strings/raw_client.py @@ -13,6 +13,7 @@ from ..errors.internal_server_error import InternalServerError from ..errors.unauthorized_error import UnauthorizedError from ..types.allow_regex import AllowRegex +from ..types.configuration_id import ConfigurationId from ..types.deidentify_string_response import DeidentifyStringResponse from ..types.entity_types import EntityTypes from ..types.error_response import ErrorResponse @@ -36,6 +37,7 @@ def deidentify_string( *, vault_id: VaultId, text: str, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenType] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -53,6 +55,8 @@ def deidentify_string( text : str String to de-identify. + configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenType] @@ -77,6 +81,7 @@ def deidentify_string( json={ "vault_id": vault_id, "text": text, + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenType, direction="write" @@ -245,6 +250,7 @@ async def deidentify_string( *, vault_id: VaultId, text: str, + configuration_id: typing.Optional[ConfigurationId] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenType] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -262,6 +268,8 @@ async def deidentify_string( text : str String to de-identify. 
+ configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[EntityTypes] token_type : typing.Optional[TokenType] @@ -286,6 +294,7 @@ async def deidentify_string( json={ "vault_id": vault_id, "text": text, + "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( object_=token_type, annotation=TokenType, direction="write" diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index 74b8a5d1..5a48e4f4 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -2,16 +2,15 @@ # isort: skip_file -from .advanced_options_column_mapping import AdvancedOptionsColumnMapping -from .advanced_options_entity_column_map import AdvancedOptionsEntityColumnMap -from .advanced_options_vault_schema import AdvancedOptionsVaultSchema from .allow_regex import AllowRegex -from .audio_config_transcription_type import AudioConfigTranscriptionType from .audit_event_audit_resource_type import AuditEventAuditResourceType from .audit_event_context import AuditEventContext from .audit_event_data import AuditEventData from .audit_event_http_info import AuditEventHttpInfo from .batch_record_method import BatchRecordMethod +from .check_guardrails_response import CheckGuardrailsResponse +from .check_guardrails_response_validation import CheckGuardrailsResponseValidation +from .configuration_id import ConfigurationId from .context_access_type import ContextAccessType from .context_auth_mode import ContextAuthMode from .deidentify_file_output import DeidentifyFileOutput @@ -20,12 +19,7 @@ from .deidentify_status_response import DeidentifyStatusResponse from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType from .deidentify_status_response_status import DeidentifyStatusResponseStatus -from .deidentify_status_response_word_character_count import DeidentifyStatusResponseWordCharacterCount from .deidentify_string_response import DeidentifyStringResponse -from .detect_data_accuracy import DetectDataAccuracy -from .detect_data_entities import DetectDataEntities -from .detect_file_request_data_type import DetectFileRequestDataType -from .detect_request_deidentify_option import DetectRequestDeidentifyOption from .detected_entity import DetectedEntity from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType from .entity_location import EntityLocation @@ -35,9 +29,11 @@ from .error_response_error import ErrorResponseError from .error_string import ErrorString from .googlerpc_status import GooglerpcStatus -from .processed_file_output_processed_file_type import ProcessedFileOutputProcessedFileType from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction +from .reidentify_file_response import ReidentifyFileResponse +from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentify_file_response_status import ReidentifyFileResponseStatus from .reidentify_string_response import ReidentifyStringResponse from .request_action_type import RequestActionType from .resource_id import ResourceId @@ -50,9 +46,6 @@ from .transformations_shift_dates import TransformationsShiftDates from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem from .uuid_ import Uuid -from .v_1_advanced_options import V1AdvancedOptions -from .v_1_audio_config import V1AudioConfig -from .v_1_audio_options import V1AudioOptions 
from .v_1_audit_after_options import V1AuditAfterOptions from .v_1_audit_event_response import V1AuditEventResponse from .v_1_audit_response import V1AuditResponse @@ -67,29 +60,17 @@ from .v_1_card import V1Card from .v_1_delete_file_response import V1DeleteFileResponse from .v_1_delete_record_response import V1DeleteRecordResponse -from .v_1_detect_file_response import V1DetectFileResponse -from .v_1_detect_status_response import V1DetectStatusResponse -from .v_1_detect_status_response_status import V1DetectStatusResponseStatus -from .v_1_detect_text_request import V1DetectTextRequest -from .v_1_detect_text_response import V1DetectTextResponse from .v_1_detokenize_record_request import V1DetokenizeRecordRequest from .v_1_detokenize_record_response import V1DetokenizeRecordResponse from .v_1_detokenize_response import V1DetokenizeResponse from .v_1_field_records import V1FieldRecords from .v_1_file_av_scan_status import V1FileAvScanStatus -from .v_1_file_data_format import V1FileDataFormat from .v_1_get_auth_token_response import V1GetAuthTokenResponse from .v_1_get_file_scan_status_response import V1GetFileScanStatusResponse from .v_1_get_query_response import V1GetQueryResponse -from .v_1_image_options import V1ImageOptions from .v_1_insert_record_response import V1InsertRecordResponse -from .v_1_locations import V1Locations from .v_1_member_type import V1MemberType -from .v_1_pdf_config import V1PdfConfig -from .v_1_pdf_options import V1PdfOptions -from .v_1_processed_file_output import V1ProcessedFileOutput from .v_1_record_meta_properties import V1RecordMetaProperties -from .v_1_response_entities import V1ResponseEntities from .v_1_tokenize_record_request import V1TokenizeRecordRequest from .v_1_tokenize_record_response import V1TokenizeRecordResponse from .v_1_tokenize_response import V1TokenizeResponse @@ -99,16 +80,15 @@ from .vault_id import VaultId __all__ = [ - "AdvancedOptionsColumnMapping", - "AdvancedOptionsEntityColumnMap", - "AdvancedOptionsVaultSchema", "AllowRegex", - "AudioConfigTranscriptionType", "AuditEventAuditResourceType", "AuditEventContext", "AuditEventData", "AuditEventHttpInfo", "BatchRecordMethod", + "CheckGuardrailsResponse", + "CheckGuardrailsResponseValidation", + "ConfigurationId", "ContextAccessType", "ContextAuthMode", "DeidentifyFileOutput", @@ -117,12 +97,7 @@ "DeidentifyStatusResponse", "DeidentifyStatusResponseOutputType", "DeidentifyStatusResponseStatus", - "DeidentifyStatusResponseWordCharacterCount", "DeidentifyStringResponse", - "DetectDataAccuracy", - "DetectDataEntities", - "DetectFileRequestDataType", - "DetectRequestDeidentifyOption", "DetectedEntity", "DetokenizeRecordResponseValueType", "EntityLocation", @@ -132,9 +107,11 @@ "ErrorResponseError", "ErrorString", "GooglerpcStatus", - "ProcessedFileOutputProcessedFileType", "ProtobufAny", "RedactionEnumRedaction", + "ReidentifyFileResponse", + "ReidentifyFileResponseOutput", + "ReidentifyFileResponseStatus", "ReidentifyStringResponse", "RequestActionType", "ResourceId", @@ -147,9 +124,6 @@ "TransformationsShiftDates", "TransformationsShiftDatesEntityTypesItem", "Uuid", - "V1AdvancedOptions", - "V1AudioConfig", - "V1AudioOptions", "V1AuditAfterOptions", "V1AuditEventResponse", "V1AuditResponse", @@ -164,29 +138,17 @@ "V1Card", "V1DeleteFileResponse", "V1DeleteRecordResponse", - "V1DetectFileResponse", - "V1DetectStatusResponse", - "V1DetectStatusResponseStatus", - "V1DetectTextRequest", - "V1DetectTextResponse", "V1DetokenizeRecordRequest", "V1DetokenizeRecordResponse", "V1DetokenizeResponse", 
"V1FieldRecords", "V1FileAvScanStatus", - "V1FileDataFormat", "V1GetAuthTokenResponse", "V1GetFileScanStatusResponse", "V1GetQueryResponse", - "V1ImageOptions", "V1InsertRecordResponse", - "V1Locations", "V1MemberType", - "V1PdfConfig", - "V1PdfOptions", - "V1ProcessedFileOutput", "V1RecordMetaProperties", - "V1ResponseEntities", "V1TokenizeRecordRequest", "V1TokenizeRecordResponse", "V1TokenizeResponse", diff --git a/skyflow/generated/rest/types/advanced_options_column_mapping.py b/skyflow/generated/rest/types/advanced_options_column_mapping.py deleted file mode 100644 index 8369b329..00000000 --- a/skyflow/generated/rest/types/advanced_options_column_mapping.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .advanced_options_entity_column_map import AdvancedOptionsEntityColumnMap - - -class AdvancedOptionsColumnMapping(UniversalBaseModel): - """ - Contains map of what has to be stored in which column. - """ - - session_id: str = pydantic.Field() - """ - Table name of the vault. - """ - - default: str = pydantic.Field() - """ - Name of column to store data in when no explicit mapping exists. - """ - - entity_column_map: typing.Optional[typing.List[AdvancedOptionsEntityColumnMap]] = pydantic.Field(default=None) - """ - Column mapping for different entities. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/advanced_options_entity_column_map.py b/skyflow/generated/rest/types/advanced_options_entity_column_map.py deleted file mode 100644 index debf836a..00000000 --- a/skyflow/generated/rest/types/advanced_options_entity_column_map.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .detect_data_entities import DetectDataEntities - - -class AdvancedOptionsEntityColumnMap(UniversalBaseModel): - """ - Contains map of what entity has to be stored in which column. - """ - - entity_type: typing.Optional[DetectDataEntities] = None - column_name: typing.Optional[str] = pydantic.Field(default=None) - """ - Column name where the entity has to be stored. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/advanced_options_vault_schema.py b/skyflow/generated/rest/types/advanced_options_vault_schema.py deleted file mode 100644 index 8496eb97..00000000 --- a/skyflow/generated/rest/types/advanced_options_vault_schema.py +++ /dev/null @@ -1,29 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .advanced_options_column_mapping import AdvancedOptionsColumnMapping - - -class AdvancedOptionsVaultSchema(UniversalBaseModel): - """ - Contains table name and column mapping. - """ - - table_name: str = pydantic.Field() - """ - Table name of the vault. 
- """ - - mapping: AdvancedOptionsColumnMapping - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/audio_config_transcription_type.py b/skyflow/generated/rest/types/audio_config_transcription_type.py deleted file mode 100644 index 13ad88d9..00000000 --- a/skyflow/generated/rest/types/audio_config_transcription_type.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -AudioConfigTranscriptionType = typing.Union[ - typing.Literal[ - "none", - "skyflow_transcription", - "aws_transcription", - "aws_transcription_diarize", - "aws_medical_transcription", - "aws_medical_transcription_diarize", - "aws_transcription_diarize_json", - "deepgram_transcription_diarize", - "deepgram_transcription_json", - "deepgram_wrapper", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/types/check_guardrails_response.py b/skyflow/generated/rest/types/check_guardrails_response.py new file mode 100644 index 00000000..ad8e2dbf --- /dev/null +++ b/skyflow/generated/rest/types/check_guardrails_response.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .check_guardrails_response_validation import CheckGuardrailsResponseValidation + + +class CheckGuardrailsResponse(UniversalBaseModel): + """ + Response to check guardrails. + """ + + text: typing.Optional[str] = pydantic.Field(default=None) + """ + Text that was checked against guardrails. + """ + + toxicity: typing.Optional[bool] = pydantic.Field(default=None) + """ + Whether the text is toxic. + """ + + denied_topics: typing.Optional[bool] = pydantic.Field(default=None) + """ + Whether any denied topics were found. + """ + + validation: typing.Optional[CheckGuardrailsResponseValidation] = pydantic.Field(default=None) + """ + Validation result. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/check_guardrails_response_validation.py b/skyflow/generated/rest/types/check_guardrails_response_validation.py new file mode 100644 index 00000000..dcb0b789 --- /dev/null +++ b/skyflow/generated/rest/types/check_guardrails_response_validation.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +CheckGuardrailsResponseValidation = typing.Union[typing.Literal["failed", "passed"], typing.Any] diff --git a/skyflow/generated/rest/types/configuration_id.py b/skyflow/generated/rest/types/configuration_id.py new file mode 100644 index 00000000..763ae161 --- /dev/null +++ b/skyflow/generated/rest/types/configuration_id.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +ConfigurationId = str diff --git a/skyflow/generated/rest/types/deidentify_file_output.py b/skyflow/generated/rest/types/deidentify_file_output.py index a4c2da4d..7e17e168 100644 --- a/skyflow/generated/rest/types/deidentify_file_output.py +++ b/skyflow/generated/rest/types/deidentify_file_output.py @@ -3,9 +3,7 @@ import typing import pydantic -import typing_extensions from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ..core.serialization import FieldMetadata from .deidentify_file_output_processed_file_type import DeidentifyFileOutputProcessedFileType @@ -14,23 +12,17 @@ class DeidentifyFileOutput(UniversalBaseModel): Details and contents of the processed file. """ - processed_file: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="processedFile")] = ( - pydantic.Field(default=None) - ) + processed_file: typing.Optional[str] = pydantic.Field(default=None) """ URL or base64-encoded data of the output. """ - processed_file_type: typing_extensions.Annotated[ - typing.Optional[DeidentifyFileOutputProcessedFileType], FieldMetadata(alias="processedFileType") - ] = pydantic.Field(default=None) + processed_file_type: typing.Optional[DeidentifyFileOutputProcessedFileType] = pydantic.Field(default=None) """ Type of the processed file. """ - processed_file_extension: typing_extensions.Annotated[ - typing.Optional[str], FieldMetadata(alias="processedFileExtension") - ] = pydantic.Field(default=None) + processed_file_extension: typing.Optional[str] = pydantic.Field(default=None) """ Extension of the processed file. """ diff --git a/skyflow/generated/rest/types/deidentify_status_response.py b/skyflow/generated/rest/types/deidentify_status_response.py index 0ad91e62..a276963c 100644 --- a/skyflow/generated/rest/types/deidentify_status_response.py +++ b/skyflow/generated/rest/types/deidentify_status_response.py @@ -3,13 +3,10 @@ import typing import pydantic -import typing_extensions from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ..core.serialization import FieldMetadata from .deidentify_file_output import DeidentifyFileOutput from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType from .deidentify_status_response_status import DeidentifyStatusResponseStatus -from .deidentify_status_response_word_character_count import DeidentifyStatusResponseWordCharacterCount class DeidentifyStatusResponse(UniversalBaseModel): @@ -27,9 +24,7 @@ class DeidentifyStatusResponse(UniversalBaseModel): How the input file was specified. """ - output_type: typing_extensions.Annotated[ - typing.Optional[DeidentifyStatusResponseOutputType], FieldMetadata(alias="outputType") - ] = pydantic.Field(default=None) + output_type: typing.Optional[DeidentifyStatusResponseOutputType] = pydantic.Field(default=None) """ How the output file is specified. """ @@ -39,11 +34,14 @@ class DeidentifyStatusResponse(UniversalBaseModel): Status details about the detect run. """ - word_character_count: typing_extensions.Annotated[ - typing.Optional[DeidentifyStatusResponseWordCharacterCount], FieldMetadata(alias="wordCharacterCount") - ] = pydantic.Field(default=None) + word_count: typing.Optional[int] = pydantic.Field(default=None) """ - Word and character count in the processed text. + Number of words in the processed text. + """ + + character_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Number of characters in the processed text. 
""" size: typing.Optional[float] = pydantic.Field(default=None) @@ -51,7 +49,7 @@ class DeidentifyStatusResponse(UniversalBaseModel): Size of the processed text in kilobytes (KB). """ - duration: typing.Optional[float] = pydantic.Field(default=None) + duration: typing.Optional[int] = pydantic.Field(default=None) """ Duration of the processed audio in seconds. """ diff --git a/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py b/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py deleted file mode 100644 index 6584ca92..00000000 --- a/skyflow/generated/rest/types/deidentify_status_response_word_character_count.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ..core.serialization import FieldMetadata - - -class DeidentifyStatusResponseWordCharacterCount(UniversalBaseModel): - """ - Word and character count in the processed text. - """ - - word_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="wordCount")] = None - character_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="characterCount")] = None - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/detect_data_accuracy.py b/skyflow/generated/rest/types/detect_data_accuracy.py deleted file mode 100644 index 91e3619e..00000000 --- a/skyflow/generated/rest/types/detect_data_accuracy.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DetectDataAccuracy = typing.Union[ - typing.Literal[ - "unknown", - "standard", - "standard_plus", - "standard_plus_multilingual", - "standard_plus_automatic", - "high", - "high_multilingual", - "high_automatic", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/types/detect_data_entities.py b/skyflow/generated/rest/types/detect_data_entities.py deleted file mode 100644 index 4ac0bd49..00000000 --- a/skyflow/generated/rest/types/detect_data_entities.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -DetectDataEntities = typing.Union[ - typing.Literal[ - "age", - "bank_account", - "credit_card", - "credit_card_expiration", - "cvv", - "date", - "date_interval", - "dob", - "driver_license", - "email_address", - "healthcare_number", - "ip_address", - "location", - "name", - "numerical_pii", - "phone_number", - "ssn", - "url", - "vehicle_id", - "medical_code", - "name_family", - "name_given", - "account_number", - "event", - "filename", - "gender_sexuality", - "language", - "location_address", - "location_city", - "location_coordinate", - "location_country", - "location_state", - "location_zip", - "marital_status", - "money", - "name_medical_professional", - "occupation", - "organization", - "organization_medical_facility", - "origin", - "passport_number", - "password", - "physical_attribute", - "political_affiliation", - "religion", - "time", - "username", - "zodiac_sign", - "blood_type", - "condition", - "dose", - "drug", - "injury", - "medical_process", - "statistics", - "routing_number", - "corporate_action", - "financial_metric", - "product", - "trend", - "duration", - "location_address_street", - "all", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/types/detect_file_request_data_type.py b/skyflow/generated/rest/types/detect_file_request_data_type.py deleted file mode 100644 index 825d4778..00000000 --- a/skyflow/generated/rest/types/detect_file_request_data_type.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DetectFileRequestDataType = typing.Union[typing.Literal["UNKNOWN", "BASE64"], typing.Any] diff --git a/skyflow/generated/rest/types/detect_request_deidentify_option.py b/skyflow/generated/rest/types/detect_request_deidentify_option.py deleted file mode 100644 index caee5f16..00000000 --- a/skyflow/generated/rest/types/detect_request_deidentify_option.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -DetectRequestDeidentifyOption = typing.Union[typing.Literal["UNKNOWN", "ENTITY_UNQ_COUNTER", "ENTITY_ONLY"], typing.Any] diff --git a/skyflow/generated/rest/types/entity_type.py b/skyflow/generated/rest/types/entity_type.py index 6b48f1d8..20195417 100644 --- a/skyflow/generated/rest/types/entity_type.py +++ b/skyflow/generated/rest/types/entity_type.py @@ -15,17 +15,19 @@ "credit_card_expiration", "cvv", "date", + "day", "date_interval", "dob", "dose", "driver_license", "drug", "duration", + "effect", "email_address", "event", "filename", "financial_metric", - "gender_sexuality", + "gender", "healthcare_number", "injury", "ip_address", @@ -42,6 +44,7 @@ "medical_code", "medical_process", "money", + "month", "name", "name_family", "name_given", @@ -49,16 +52,19 @@ "numerical_pii", "occupation", "organization", + "organization_id", "organization_medical_facility", "origin", "passport_number", "password", "phone_number", + "project", "physical_attribute", "political_affiliation", "product", "religion", "routing_number", + "sexuality", "ssn", "statistics", "time", @@ -66,6 +72,7 @@ "url", "username", "vehicle_id", + "year", "zodiac_sign", ], typing.Any, diff --git a/skyflow/generated/rest/types/processed_file_output_processed_file_type.py b/skyflow/generated/rest/types/processed_file_output_processed_file_type.py deleted file mode 100644 index 18758eaa..00000000 --- a/skyflow/generated/rest/types/processed_file_output_processed_file_type.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -ProcessedFileOutputProcessedFileType = typing.Union[ - typing.Literal[ - "none", - "redacted_audio", - "redacted_image", - "redacted_transcription", - "redacted_file", - "redacted_text", - "entities", - "redacted_aws_transcription_diarize_json", - "redacted_deepgram_transcription_diarize_json", - "plaintext_transcribed", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/types/v_1_pdf_options.py b/skyflow/generated/rest/types/reidentify_file_response.py similarity index 51% rename from skyflow/generated/rest/types/v_1_pdf_options.py rename to skyflow/generated/rest/types/reidentify_file_response.py index 28fdf1bc..c67b41ac 100644 --- a/skyflow/generated/rest/types/v_1_pdf_options.py +++ b/skyflow/generated/rest/types/reidentify_file_response.py @@ -4,23 +4,27 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentify_file_response_status import ReidentifyFileResponseStatus -class V1PdfOptions(UniversalBaseModel): +class ReidentifyFileResponse(UniversalBaseModel): """ - How to handle PDF files. + Response to re-identify a file. """ - density: typing.Optional[int] = pydantic.Field(default=None) + status: ReidentifyFileResponseStatus = pydantic.Field() """ - Pixel density at which to process the PDF file. + Status of the re-identify operation. """ - max_resolution: typing.Optional[int] = pydantic.Field(default=None) + output_type: typing.Literal["BASE64"] = pydantic.Field(default="BASE64") """ - Max resolution at which to process the PDF file. + Format of the output file. 
""" + output: ReidentifyFileResponseOutput + if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 else: diff --git a/skyflow/generated/rest/types/v_1_image_options.py b/skyflow/generated/rest/types/reidentify_file_response_output.py similarity index 57% rename from skyflow/generated/rest/types/v_1_image_options.py rename to skyflow/generated/rest/types/reidentify_file_response_output.py index 7f4143df..bda44777 100644 --- a/skyflow/generated/rest/types/v_1_image_options.py +++ b/skyflow/generated/rest/types/reidentify_file_response_output.py @@ -6,19 +6,20 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class V1ImageOptions(UniversalBaseModel): +class ReidentifyFileResponseOutput(UniversalBaseModel): + processed_file: str = pydantic.Field() """ - How to handle image files. + Re-identified file content in base64 format. """ - output_processed_image: typing.Optional[bool] = pydantic.Field(default=None) + processed_file_type: typing.Literal["reidentified_file"] = pydantic.Field(default="reidentified_file") """ - If `true`, includes processed image in the output. + Type of the processed file. """ - output_ocr_text: typing.Optional[bool] = pydantic.Field(default=None) + processed_file_extension: str = pydantic.Field() """ - If `true`, includes OCR text output in the response. + Extension of the processed file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/reidentify_file_response_status.py b/skyflow/generated/rest/types/reidentify_file_response_status.py new file mode 100644 index 00000000..c640c3a6 --- /dev/null +++ b/skyflow/generated/rest/types/reidentify_file_response_status.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ReidentifyFileResponseStatus = typing.Union[typing.Literal["failed", "in_progress", "success"], typing.Any] diff --git a/skyflow/generated/rest/types/v_1_advanced_options.py b/skyflow/generated/rest/types/v_1_advanced_options.py deleted file mode 100644 index ea893551..00000000 --- a/skyflow/generated/rest/types/v_1_advanced_options.py +++ /dev/null @@ -1,38 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ..core.serialization import FieldMetadata -from .advanced_options_vault_schema import AdvancedOptionsVaultSchema - - -class V1AdvancedOptions(UniversalBaseModel): - """ - Advanced options for post processing. - """ - - date_shift: typing.Optional[int] = pydantic.Field(default=None) - """ - No. of days by which original date has to be shifted to. - """ - - custom_client: typing.Optional[bool] = pydantic.Field(default=None) - """ - Custom client specific logic. 
- """ - - schema_: typing_extensions.Annotated[typing.Optional[AdvancedOptionsVaultSchema], FieldMetadata(alias="schema")] = ( - None - ) - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audio_config.py b/skyflow/generated/rest/types/v_1_audio_config.py deleted file mode 100644 index dc866e47..00000000 --- a/skyflow/generated/rest/types/v_1_audio_config.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .audio_config_transcription_type import AudioConfigTranscriptionType -from .v_1_audio_options import V1AudioOptions - - -class V1AudioConfig(UniversalBaseModel): - """ - How to handle audio files. - """ - - output_transcription: typing.Optional[AudioConfigTranscriptionType] = None - output_processed_audio: typing.Optional[bool] = pydantic.Field(default=None) - """ - If `true`, includes processed audio file in the response. - """ - - options: typing.Optional[V1AudioOptions] = None - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_audio_options.py b/skyflow/generated/rest/types/v_1_audio_options.py deleted file mode 100644 index 6e5b3df9..00000000 --- a/skyflow/generated/rest/types/v_1_audio_options.py +++ /dev/null @@ -1,46 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel - - -class V1AudioOptions(UniversalBaseModel): - """ - Options for audio files. - """ - - bleep_start_padding: typing.Optional[float] = pydantic.Field(default=None) - """ - Padding added to the beginning of a bleep, in seconds. - """ - - bleep_end_padding: typing.Optional[float] = pydantic.Field(default=None) - """ - Padding added to the end of a bleep, in seconds. - """ - - distortion_steps: typing.Optional[int] = pydantic.Field(default=None) - """ - Specifies how the distortion will be made. Providing a number more than 0 will result in a higher tone and a coefficient less than 0 will result in a lower tone. - """ - - bleep_frequency: typing.Optional[int] = pydantic.Field(default=None) - """ - This parameter configures the frequency of the sine wave used for the bleep sound in an audio segment. - """ - - bleep_gain: typing.Optional[int] = pydantic.Field(default=None) - """ - It controls the relative loudness of the bleep,positive values increase its loudness, and negative values decrease it. 
- """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_file_response.py b/skyflow/generated/rest/types/v_1_detect_file_response.py deleted file mode 100644 index f933703e..00000000 --- a/skyflow/generated/rest/types/v_1_detect_file_response.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel - - -class V1DetectFileResponse(UniversalBaseModel): - """ - Response to deidentify a file. - """ - - status_url: typing.Optional[str] = pydantic.Field(default=None) - """ - Status URL for the deidentification request. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_status_response.py b/skyflow/generated/rest/types/v_1_detect_status_response.py deleted file mode 100644 index ac859394..00000000 --- a/skyflow/generated/rest/types/v_1_detect_status_response.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .v_1_detect_status_response_status import V1DetectStatusResponseStatus -from .v_1_processed_file_output import V1ProcessedFileOutput - - -class V1DetectStatusResponse(UniversalBaseModel): - """ - Response to get the status of a file deidentification request. - """ - - status: typing.Optional[V1DetectStatusResponseStatus] = None - output: typing.Optional[typing.List[V1ProcessedFileOutput]] = pydantic.Field(default=None) - """ - How the input file was specified. - """ - - message: typing.Optional[str] = pydantic.Field(default=None) - """ - Status details about the deidentification request. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_status_response_status.py b/skyflow/generated/rest/types/v_1_detect_status_response_status.py deleted file mode 100644 index 1b9531cb..00000000 --- a/skyflow/generated/rest/types/v_1_detect_status_response_status.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -V1DetectStatusResponseStatus = typing.Union[typing.Literal["UNKNOWN", "FAILED", "SUCCESS", "IN_PROGRESS"], typing.Any] diff --git a/skyflow/generated/rest/types/v_1_detect_text_request.py b/skyflow/generated/rest/types/v_1_detect_text_request.py deleted file mode 100644 index f832ef7b..00000000 --- a/skyflow/generated/rest/types/v_1_detect_text_request.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .detect_data_accuracy import DetectDataAccuracy -from .detect_data_entities import DetectDataEntities -from .detect_request_deidentify_option import DetectRequestDeidentifyOption -from .v_1_advanced_options import V1AdvancedOptions - - -class V1DetectTextRequest(UniversalBaseModel): - """ - Request to deidentify a string. - """ - - text: str = pydantic.Field() - """ - Data to deidentify. - """ - - vault_id: str = pydantic.Field() - """ - ID of the vault. - """ - - session_id: typing.Optional[str] = pydantic.Field(default=None) - """ - Will give a handle to delete the tokens generated during a specific interaction. - """ - - restrict_entity_types: typing.Optional[typing.List[DetectDataEntities]] = pydantic.Field(default=None) - """ - Entities to detect and deidentify. - """ - - deidentify_token_format: typing.Optional[DetectRequestDeidentifyOption] = None - allow_regex: typing.Optional[typing.List[str]] = pydantic.Field(default=None) - """ - Regular expressions to ignore when detecting entities. - """ - - restrict_regex: typing.Optional[typing.List[str]] = pydantic.Field(default=None) - """ - Regular expressions to always restrict. Strings matching these regular expressions are replaced with 'RESTRICTED'. - """ - - return_entities: typing.Optional[bool] = pydantic.Field(default=None) - """ - If `true`, returns the details for the detected entities. - """ - - accuracy: typing.Optional[DetectDataAccuracy] = None - advanced_options: typing.Optional[V1AdvancedOptions] = None - store_entities: typing.Optional[bool] = pydantic.Field(default=None) - """ - Indicates whether entities should be stored in the vault. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_detect_text_response.py b/skyflow/generated/rest/types/v_1_detect_text_response.py deleted file mode 100644 index 954e7d07..00000000 --- a/skyflow/generated/rest/types/v_1_detect_text_response.py +++ /dev/null @@ -1,32 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .v_1_response_entities import V1ResponseEntities - - -class V1DetectTextResponse(UniversalBaseModel): - """ - Response to deidentify a string. - """ - - processed_text: typing.Optional[str] = pydantic.Field(default=None) - """ - Deidentified text. If the input was a file, text that was extracted or transcribed from the file and deidentified. - """ - - entities: typing.Optional[typing.List[V1ResponseEntities]] = pydantic.Field(default=None) - """ - Detected entities. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_file_data_format.py b/skyflow/generated/rest/types/v_1_file_data_format.py deleted file mode 100644 index f717c793..00000000 --- a/skyflow/generated/rest/types/v_1_file_data_format.py +++ /dev/null @@ -1,28 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -V1FileDataFormat = typing.Union[ - typing.Literal[ - "bmp", - "csv", - "doc", - "docx", - "jpeg", - "jpg", - "json", - "mp3", - "pdf", - "png", - "ppt", - "pptx", - "tif", - "tiff", - "txt", - "unknown", - "wav", - "xls", - "xlsx", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/types/v_1_locations.py b/skyflow/generated/rest/types/v_1_locations.py deleted file mode 100644 index 098d6b6e..00000000 --- a/skyflow/generated/rest/types/v_1_locations.py +++ /dev/null @@ -1,41 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel - - -class V1Locations(UniversalBaseModel): - """ - Locations of an entity in the text. - """ - - start_index: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the first character of the string in the original text. - """ - - end_index: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the last character of the string in the original text. - """ - - start_index_processed: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the first character of the string in the processed text. - """ - - end_index_processed: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the last character of the string in the processed text. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_pdf_config.py b/skyflow/generated/rest/types/v_1_pdf_config.py deleted file mode 100644 index eff6107e..00000000 --- a/skyflow/generated/rest/types/v_1_pdf_config.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .v_1_pdf_options import V1PdfOptions - - -class V1PdfConfig(UniversalBaseModel): - """ - How to handle PDF files. - """ - - options: typing.Optional[V1PdfOptions] = None - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_processed_file_output.py b/skyflow/generated/rest/types/v_1_processed_file_output.py deleted file mode 100644 index 80968814..00000000 --- a/skyflow/generated/rest/types/v_1_processed_file_output.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .detect_file_request_data_type import DetectFileRequestDataType -from .processed_file_output_processed_file_type import ProcessedFileOutputProcessedFileType - - -class V1ProcessedFileOutput(UniversalBaseModel): - """ - Contains details and contents of the processed file. - """ - - output_type: typing.Optional[DetectFileRequestDataType] = None - processed_file: typing.Optional[str] = pydantic.Field(default=None) - """ - URL or base64-encoded data of the output. 
- """ - - processed_file_type: typing.Optional[ProcessedFileOutputProcessedFileType] = None - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/v_1_response_entities.py b/skyflow/generated/rest/types/v_1_response_entities.py deleted file mode 100644 index fbc0ecf4..00000000 --- a/skyflow/generated/rest/types/v_1_response_entities.py +++ /dev/null @@ -1,43 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .v_1_locations import V1Locations - - -class V1ResponseEntities(UniversalBaseModel): - """ - Detected entities. - """ - - processed_text: typing.Optional[str] = pydantic.Field(default=None) - """ - Processed text of the entity. - """ - - original_text: typing.Optional[str] = pydantic.Field(default=None) - """ - Original text of the entity. - """ - - location: typing.Optional[V1Locations] = None - best_label: typing.Optional[str] = pydantic.Field(default=None) - """ - Highest rated label. - """ - - labels: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) - """ - Labels and their scores. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 343924ba..b9330d56 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b7' \ No newline at end of file +SDK_VERSION = '2.0.0b7.dev0+b8744bb' \ No newline at end of file diff --git a/skyflow/utils/enums/detect_entities.py b/skyflow/utils/enums/detect_entities.py index 23b36bdc..91d5e1a2 100644 --- a/skyflow/utils/enums/detect_entities.py +++ b/skyflow/utils/enums/detect_entities.py @@ -12,17 +12,19 @@ class DetectEntities(Enum): CREDIT_CARD_EXPIRATION = "credit_card_expiration" CVV = "cvv" DATE = "date" + DAY = "day" DATE_INTERVAL = "date_interval" DOB = "dob" DOSE = "dose" DRIVER_LICENSE = "driver_license" DRUG = "drug" DURATION = "duration" + EFFECT = "effect" EMAIL_ADDRESS = "email_address" EVENT = "event" FILENAME = "filename" FINANCIAL_METRIC = "financial_metric" - GENDER_SEXUALITY = "gender_sexuality" + GENDER = "gender" HEALTHCARE_NUMBER = "healthcare_number" INJURY = "injury" IP_ADDRESS = "ip_address" @@ -39,6 +41,7 @@ class DetectEntities(Enum): MEDICAL_CODE = "medical_code" MEDICAL_PROCESS = "medical_process" MONEY = "money" + MONTH = "month" NAME = "name" NAME_FAMILY = "name_family" NAME_GIVEN = "name_given" @@ -46,16 +49,19 @@ class DetectEntities(Enum): NUMERICAL_PII = "numerical_pii" OCCUPATION = "occupation" ORGANIZATION = "organization" + ORGANIZATION_ID = "organization_id" ORGANIZATION_MEDICAL_FACILITY = "organization_medical_facility" ORIGIN = "origin" PASSPORT_NUMBER = "passport_number" PASSWORD = "password" PHONE_NUMBER = "phone_number" + PROJECT = "project" PHYSICAL_ATTRIBUTE = "physical_attribute" POLITICAL_AFFILIATION = "political_affiliation" PRODUCT = "product" RELIGION = "religion" ROUTING_NUMBER = "routing_number" + SEXUALITY = "sexuality" SSN = "ssn" STATISTICS = "statistics" TIME = "time" @@ 
-63,4 +69,5 @@ class DetectEntities(Enum): URL = "url" USERNAME = "username" VEHICLE_ID = "vehicle_id" + YEAR = "year" ZODIAC_SIGN = "zodiac_sign" \ No newline at end of file diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py index 606d58ef..93fac69e 100644 --- a/skyflow/vault/controller/_detect.py +++ b/skyflow/vault/controller/_detect.py @@ -1,10 +1,6 @@ import io import json import os -from skyflow.error import SkyflowError -from skyflow.generated.rest.types.token_type import TokenType -from skyflow.generated.rest.types.transformations import Transformations -from skyflow.generated.rest.types.transformations_shift_dates import TransformationsShiftDates import base64 import time from skyflow.generated.rest import DeidentifyTextRequestFile, DeidentifyAudioRequestFile, DeidentifyPdfRequestFile, \ @@ -88,19 +84,27 @@ def __poll_for_processed_file(self, run_id, max_wait_time=64): raise e def __parse_deidentify_file_response(self, data, run_id=None, status=None): - output = getattr(data, "output", []) - output_type = getattr(data, "output_type", None) - word_character_count = getattr(data, "word_character_count", None) + status_val = getattr(data, "status", None) or status + run_id_val = getattr(data, "run_id", None) or run_id + + word_count = None + char_count = None + + word_character_count = getattr(data, "wordCharacterCount", None) + if word_character_count and isinstance(word_character_count, dict): + word_count = word_character_count.get("wordCount") + char_count = word_character_count.get("characterCount") + size = getattr(data, "size", None) + + size = float(size) if size is not None else None + duration = getattr(data, "duration", None) pages = getattr(data, "pages", None) slides = getattr(data, "slides", None) - message = getattr(data, "message", None) - status_val = getattr(data, "status", None) or status - run_id_val = getattr(data, "run_id", None) or run_id - # Convert output to list of dicts if it's a list of objects + # Convert output to list of dicts, prefer camelCase keys def output_to_dict_list(output): result = [] for o in output: @@ -112,9 +116,11 @@ def output_to_dict_list(output): }) else: result.append({ - "file": getattr(o, "processed_file", None), - "type": getattr(o, "processed_file_type", None), - "extension": getattr(o, "processed_file_extension", None) + "file": getattr(o, "processedFile", None) or getattr(o, "processed_file", None), + "type": getattr(o, "processedFileType", None) or getattr(o, "processed_file_type", None), + "extension": getattr(o, "processedFileExtension", None) or getattr(o, + "processed_file_extension", + None) }) return result @@ -123,13 +129,9 @@ def output_to_dict_list(output): entities = [o for o in output_list if o.get("type") == "entities"] - word_count = getattr(word_character_count, "word_count", None) - char_count = getattr(word_character_count, "character_count", None) - base64_string = first_output.get("file", None) extension = first_output.get("extension", None) - file_obj = None if base64_string is not None: file_bytes = base64.b64decode(base64_string) file_obj = io.BytesIO(file_bytes) diff --git a/tests/vault/controller/test__detect.py b/tests/vault/controller/test__detect.py index 1352f85b..3096ce08 100644 --- a/tests/vault/controller/test__detect.py +++ b/tests/vault/controller/test__detect.py @@ -42,8 +42,8 @@ def test_deidentify_text_success(self, mock_parse_response, mock_validate): 'scores': {'confidence': 0.9} } ], - 'word_count': 4, - 'char_count': 20 + 'wordCount': 4, + 'charCount': 20 
} # Create request @@ -149,7 +149,7 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [] - processed_response.word_character_count = Mock(word_count=1, character_count=1) + processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", @@ -212,7 +212,7 @@ def test_deidentify_file_audio_success(self, mock_base64, mock_validate): processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [] - processed_response.word_character_count = Mock(word_count=1, character_count=1) + processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", @@ -257,7 +257,7 @@ def test_get_detect_run_success(self, mock_validate): response = Mock() response.status = "SUCCESS" response.output = [] - response.word_character_count = Mock(word_count=1, character_count=1) + response.wordCharacterCount = Mock(wordCount=1, characterCount=1) files_api.get_run.return_value = response with patch.object(self.detect, "_Detect__parse_deidentify_file_response", return_value=DeidentifyFileResponse(file="file", type="txt", extension="txt", word_count=1, @@ -299,13 +299,14 @@ def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, processed_response.output = [ {"processedFile": "dGVzdCBjb250ZW50", "processedFileType": "pdf", "processedFileExtension": "pdf"} ] - processed_response.word_character_count = Mock(word_count=1, character_count=1) + processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) processed_response.size = 1 processed_response.duration = 1 processed_response.pages = 1 processed_response.slides = 1 processed_response.message = "" processed_response.run_id = "runid123" + processed_response.wordCharacterCount = {"wordCount": 1, "characterCount": 1} mock_poll.return_value = processed_response # Test configuration for different file types @@ -441,7 +442,7 @@ def test_parse_deidentify_file_response_dict_and_obj(self): {"processedFile": "YWJj", "processedFileType": "pdf", "processedFileExtension": "pdf"}, # base64 for "abc" {"processedFile": "ZGVm", "processedFileType": "entities", "processedFileExtension": "json"} # base64 for "def" ], - "word_character_count": {"word_count": 5, "character_count": 10}, + "wordCharacterCount": {"wordCount": 5, "characterCount": 10}, "size": 1, "duration": 2, "pages": 3, @@ -454,8 +455,8 @@ def test_parse_deidentify_file_response_dict_and_obj(self): # Object input class DummyWordChar: - word_count = 7 - character_count = 14 + wordCount = 7 + characterCount = 14 class DummyData: output = [ @@ -605,7 +606,8 @@ class OutputObj: data = Mock() data.output = [OutputObj()] - data.word_character_count = Mock(word_count=1, character_count=1) + data.size = 1 + data.wordCharacterCount = Mock(wordCount=1, characterCount=1) result = self.detect._Detect__parse_deidentify_file_response(data) @@ -657,7 +659,7 @@ def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_ba processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [] - 
processed_response.word_character_count = Mock(word_count=1, character_count=1) + processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) # Test the method with patch.object(self.detect, "_Detect__poll_for_processed_file", From 1a640c85043db640d482494420374690f311de49 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 24 Jul 2025 14:44:21 +0000 Subject: [PATCH 39/60] [AUTOMATED] Public Release - 2.1.0b1 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index c9707d32..a205e472 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0b7.dev0+b8744bb' +current_version = '2.1.0b1' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index b9330d56..2e213012 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0b7.dev0+b8744bb' \ No newline at end of file +SDK_VERSION = '2.1.0b1' \ No newline at end of file From 61e368f12f2a9aa6cfcf09e1cc113e7ff4be31ba Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Wed, 10 Sep 2025 19:39:40 +0530 Subject: [PATCH 40/60] SK-971 file upload support (#191) * SK-971 file upload support --- skyflow/generated/rest/__init__.py | 6 +- skyflow/generated/rest/core/client_wrapper.py | 2 +- skyflow/generated/rest/files/client.py | 16 +- skyflow/generated/rest/files/raw_client.py | 16 +- ...deidentify_image_request_masking_method.py | 2 +- skyflow/generated/rest/guardrails/client.py | 12 +- skyflow/generated/rest/records/client.py | 141 +++++++++ skyflow/generated/rest/records/raw_client.py | 239 +++++++++++++++ skyflow/generated/rest/types/__init__.py | 6 +- .../rest/types/deidentify_status_response.py | 4 +- .../deidentify_status_response_output_type.py | 2 +- .../deidentify_status_response_status.py | 2 +- skyflow/generated/rest/types/entity_type.py | 4 +- skyflow/generated/rest/types/error_string.py | 3 - .../rest/types/reidentify_file_response.py | 3 +- .../reidentify_file_response_output_type.py | 5 + .../types/reidentify_file_response_status.py | 2 +- .../rest/types/reidentify_string_response.py | 2 +- .../rest/types/upload_file_v_2_response.py | 34 +++ skyflow/utils/_skyflow_messages.py | 9 + skyflow/utils/validations/__init__.py | 1 + skyflow/utils/validations/_validations.py | 64 ++++ skyflow/vault/controller/_vault.py | 57 +++- skyflow/vault/data/__init__.py | 4 +- skyflow/vault/data/_file_upload_request.py | 18 ++ skyflow/vault/data/_file_upload_response.py | 6 + tests/vault/controller/test__vault.py | 282 +++++++++++++++++- 27 files changed, 893 insertions(+), 49 deletions(-) delete mode 100644 skyflow/generated/rest/types/error_string.py create mode 100644 skyflow/generated/rest/types/reidentify_file_response_output_type.py create mode 100644 skyflow/generated/rest/types/upload_file_v_2_response.py create mode 100644 skyflow/vault/data/_file_upload_request.py create mode 100644 skyflow/vault/data/_file_upload_response.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index bad57c24..b8309d05 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -28,12 +28,12 @@ EntityTypes, ErrorResponse, ErrorResponseError, - ErrorString, GooglerpcStatus, ProtobufAny, RedactionEnumRedaction, ReidentifyFileResponse, ReidentifyFileResponseOutput, + ReidentifyFileResponseOutputType, 
ReidentifyFileResponseStatus, ReidentifyStringResponse, RequestActionType, @@ -46,6 +46,7 @@ Transformations, TransformationsShiftDates, TransformationsShiftDatesEntityTypesItem, + UploadFileV2Response, Uuid, V1AuditAfterOptions, V1AuditEventResponse, @@ -175,7 +176,6 @@ "EntityTypes", "ErrorResponse", "ErrorResponseError", - "ErrorString", "GooglerpcStatus", "InternalServerError", "NotFoundError", @@ -189,6 +189,7 @@ "ReidentifyFileRequestFormat", "ReidentifyFileResponse", "ReidentifyFileResponseOutput", + "ReidentifyFileResponseOutputType", "ReidentifyFileResponseStatus", "ReidentifyStringRequestFormat", "ReidentifyStringResponse", @@ -205,6 +206,7 @@ "TransformationsShiftDates", "TransformationsShiftDatesEntityTypesItem", "UnauthorizedError", + "UploadFileV2Response", "Uuid", "V1AuditAfterOptions", "V1AuditEventResponse", diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index a3210a7e..5179f373 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -24,7 +24,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "skyflow_vault", - "X-Fern-SDK-Version": "0.0.252", + "X-Fern-SDK-Version": "0.0.323", **(self.get_custom_headers() or {}), } headers["Authorization"] = f"Bearer {self._get_token()}" diff --git a/skyflow/generated/rest/files/client.py b/skyflow/generated/rest/files/client.py index 654789de..4d5d548b 100644 --- a/skyflow/generated/rest/files/client.py +++ b/skyflow/generated/rest/files/client.py @@ -200,8 +200,8 @@ def deidentify_pdf( vault_id: VaultId, file: DeidentifyPdfRequestFile, configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[int] = OMIT, - max_resolution: typing.Optional[int] = OMIT, + density: typing.Optional[float] = OMIT, + max_resolution: typing.Optional[float] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -221,10 +221,10 @@ def deidentify_pdf( configuration_id : typing.Optional[ConfigurationId] - density : typing.Optional[int] + density : typing.Optional[float] Pixel density at which to process the PDF file. - max_resolution : typing.Optional[int] + max_resolution : typing.Optional[float] Max resolution at which to process the PDF file. entity_types : typing.Optional[EntityTypes] @@ -1020,8 +1020,8 @@ async def deidentify_pdf( vault_id: VaultId, file: DeidentifyPdfRequestFile, configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[int] = OMIT, - max_resolution: typing.Optional[int] = OMIT, + density: typing.Optional[float] = OMIT, + max_resolution: typing.Optional[float] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1041,10 +1041,10 @@ async def deidentify_pdf( configuration_id : typing.Optional[ConfigurationId] - density : typing.Optional[int] + density : typing.Optional[float] Pixel density at which to process the PDF file. - max_resolution : typing.Optional[int] + max_resolution : typing.Optional[float] Max resolution at which to process the PDF file. 
entity_types : typing.Optional[EntityTypes] diff --git a/skyflow/generated/rest/files/raw_client.py b/skyflow/generated/rest/files/raw_client.py index 5a67292f..c0e535ea 100644 --- a/skyflow/generated/rest/files/raw_client.py +++ b/skyflow/generated/rest/files/raw_client.py @@ -287,8 +287,8 @@ def deidentify_pdf( vault_id: VaultId, file: DeidentifyPdfRequestFile, configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[int] = OMIT, - max_resolution: typing.Optional[int] = OMIT, + density: typing.Optional[float] = OMIT, + max_resolution: typing.Optional[float] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -308,10 +308,10 @@ def deidentify_pdf( configuration_id : typing.Optional[ConfigurationId] - density : typing.Optional[int] + density : typing.Optional[float] Pixel density at which to process the PDF file. - max_resolution : typing.Optional[int] + max_resolution : typing.Optional[float] Max resolution at which to process the PDF file. entity_types : typing.Optional[EntityTypes] @@ -1575,8 +1575,8 @@ async def deidentify_pdf( vault_id: VaultId, file: DeidentifyPdfRequestFile, configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[int] = OMIT, - max_resolution: typing.Optional[int] = OMIT, + density: typing.Optional[float] = OMIT, + max_resolution: typing.Optional[float] = OMIT, entity_types: typing.Optional[EntityTypes] = OMIT, token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, allow_regex: typing.Optional[AllowRegex] = OMIT, @@ -1596,10 +1596,10 @@ async def deidentify_pdf( configuration_id : typing.Optional[ConfigurationId] - density : typing.Optional[int] + density : typing.Optional[float] Pixel density at which to process the PDF file. - max_resolution : typing.Optional[int] + max_resolution : typing.Optional[float] Max resolution at which to process the PDF file. 
entity_types : typing.Optional[EntityTypes] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py index d1ff8c83..bc0c338c 100644 --- a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py +++ b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py @@ -2,4 +2,4 @@ import typing -DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackout", "blur"], typing.Any] +DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackbox", "blur"], typing.Any] diff --git a/skyflow/generated/rest/guardrails/client.py b/skyflow/generated/rest/guardrails/client.py index 169f7de1..e7fe1e05 100644 --- a/skyflow/generated/rest/guardrails/client.py +++ b/skyflow/generated/rest/guardrails/client.py @@ -68,10 +68,8 @@ def check_guardrails( token="YOUR_TOKEN", ) client.guardrails.check_guardrails( - vault_id="VAULT_ID", - text="I love to play cricket.", - check_toxicity=True, - deny_topics=["sports"], + vault_id="vault_id", + text="text", ) """ _response = self._raw_client.check_guardrails( @@ -145,10 +143,8 @@ async def check_guardrails( async def main() -> None: await client.guardrails.check_guardrails( - vault_id="VAULT_ID", - text="I love to play cricket.", - check_toxicity=True, - deny_topics=["sports"], + vault_id="vault_id", + text="text", ) diff --git a/skyflow/generated/rest/records/client.py b/skyflow/generated/rest/records/client.py index 1f727bfc..cfe15a1c 100644 --- a/skyflow/generated/rest/records/client.py +++ b/skyflow/generated/rest/records/client.py @@ -5,6 +5,7 @@ from .. import core from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from ..types.upload_file_v_2_response import UploadFileV2Response from ..types.v_1_batch_operation_response import V1BatchOperationResponse from ..types.v_1_batch_record import V1BatchRecord from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse @@ -700,6 +701,72 @@ def file_service_get_file_scan_status( ) return _response.data + def upload_file_v_2( + self, + vault_id: str, + *, + table_name: str, + column_name: str, + file: core.File, + skyflow_id: typing.Optional[str] = OMIT, + return_file_metadata: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> UploadFileV2Response: + """ + Uploads the specified file to a record. If an existing record isn't specified, creates a new record and uploads the file to that record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table to upload the file to. + + column_name : str + Name of the column to upload the file to. The column must have a `file` data type. + + file : core.File + See core.File for more documentation + + skyflow_id : typing.Optional[str] + Skyflow ID of the record to upload the file to. If `skyflowID` isn't specified, a new record will be created. + + return_file_metadata : typing.Optional[bool] + If `true`, returns metadata about the uploaded file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + UploadFileV2Response + File uploaded successfully. 
+ + Examples + -------- + from skyflow import Skyflow + + client = Skyflow( + token="YOUR_TOKEN", + ) + client.records.upload_file_v_2( + vault_id="d4410ea01d83473ca09a24c6b03096d4", + table_name="tableName", + column_name="columnName", + ) + """ + _response = self._raw_client.upload_file_v_2( + vault_id, + table_name=table_name, + column_name=column_name, + file=file, + skyflow_id=skyflow_id, + return_file_metadata=return_file_metadata, + request_options=request_options, + ) + return _response.data + class AsyncRecordsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -1455,3 +1522,77 @@ async def main() -> None: vault_id, table_name, id, column_name, request_options=request_options ) return _response.data + + async def upload_file_v_2( + self, + vault_id: str, + *, + table_name: str, + column_name: str, + file: core.File, + skyflow_id: typing.Optional[str] = OMIT, + return_file_metadata: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> UploadFileV2Response: + """ + Uploads the specified file to a record. If an existing record isn't specified, creates a new record and uploads the file to that record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table to upload the file to. + + column_name : str + Name of the column to upload the file to. The column must have a `file` data type. + + file : core.File + See core.File for more documentation + + skyflow_id : typing.Optional[str] + Skyflow ID of the record to upload the file to. If `skyflowID` isn't specified, a new record will be created. + + return_file_metadata : typing.Optional[bool] + If `true`, returns metadata about the uploaded file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + UploadFileV2Response + File uploaded successfully. 
+ + Examples + -------- + import asyncio + + from skyflow import AsyncSkyflow + + client = AsyncSkyflow( + token="YOUR_TOKEN", + ) + + + async def main() -> None: + await client.records.upload_file_v_2( + vault_id="d4410ea01d83473ca09a24c6b03096d4", + table_name="tableName", + column_name="columnName", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.upload_file_v_2( + vault_id, + table_name=table_name, + column_name=column_name, + file=file, + skyflow_id=skyflow_id, + return_file_metadata=return_file_metadata, + request_options=request_options, + ) + return _response.data diff --git a/skyflow/generated/rest/records/raw_client.py b/skyflow/generated/rest/records/raw_client.py index e2bfdc92..b42e0bc9 100644 --- a/skyflow/generated/rest/records/raw_client.py +++ b/skyflow/generated/rest/records/raw_client.py @@ -11,7 +11,12 @@ from ..core.pydantic_utilities import parse_obj_as from ..core.request_options import RequestOptions from ..core.serialization import convert_and_respect_annotation_metadata +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError from ..errors.not_found_error import NotFoundError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.error_response import ErrorResponse +from ..types.upload_file_v_2_response import UploadFileV2Response from ..types.v_1_batch_operation_response import V1BatchOperationResponse from ..types.v_1_batch_record import V1BatchRecord from ..types.v_1_bulk_delete_record_response import V1BulkDeleteRecordResponse @@ -804,6 +809,123 @@ def file_service_get_file_scan_status( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + def upload_file_v_2( + self, + vault_id: str, + *, + table_name: str, + column_name: str, + file: core.File, + skyflow_id: typing.Optional[str] = OMIT, + return_file_metadata: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[UploadFileV2Response]: + """ + Uploads the specified file to a record. If an existing record isn't specified, creates a new record and uploads the file to that record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table to upload the file to. + + column_name : str + Name of the column to upload the file to. The column must have a `file` data type. + + file : core.File + See core.File for more documentation + + skyflow_id : typing.Optional[str] + Skyflow ID of the record to upload the file to. If `skyflowID` isn't specified, a new record will be created. + + return_file_metadata : typing.Optional[bool] + If `true`, returns metadata about the uploaded file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[UploadFileV2Response] + File uploaded successfully. 
+ """ + _response = self._client_wrapper.httpx_client.request( + f"v2/vaults/{jsonable_encoder(vault_id)}/files/upload", + method="POST", + data={ + "tableName": table_name, + "columnName": column_name, + "skyflowID": skyflow_id, + "returnFileMetadata": return_file_metadata, + }, + files={ + "file": file, + }, + request_options=request_options, + omit=OMIT, + force_multipart=True, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UploadFileV2Response, + parse_obj_as( + type_=UploadFileV2Response, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + class AsyncRawRecordsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -1577,3 +1699,120 @@ async def file_service_get_file_scan_status( except JSONDecodeError: raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def upload_file_v_2( + self, + vault_id: str, + *, + table_name: str, + column_name: str, + file: core.File, + skyflow_id: typing.Optional[str] = OMIT, + return_file_metadata: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[UploadFileV2Response]: + """ + Uploads the specified file to a record. If an existing record isn't specified, creates a new record and uploads the file to that record. + + Parameters + ---------- + vault_id : str + ID of the vault. + + table_name : str + Name of the table to upload the file to. + + column_name : str + Name of the column to upload the file to. The column must have a `file` data type. + + file : core.File + See core.File for more documentation + + skyflow_id : typing.Optional[str] + Skyflow ID of the record to upload the file to. If `skyflowID` isn't specified, a new record will be created. + + return_file_metadata : typing.Optional[bool] + If `true`, returns metadata about the uploaded file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[UploadFileV2Response] + File uploaded successfully. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + f"v2/vaults/{jsonable_encoder(vault_id)}/files/upload", + method="POST", + data={ + "tableName": table_name, + "columnName": column_name, + "skyflowID": skyflow_id, + "returnFileMetadata": return_file_metadata, + }, + files={ + "file": file, + }, + request_options=request_options, + omit=OMIT, + force_multipart=True, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UploadFileV2Response, + parse_obj_as( + type_=UploadFileV2Response, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + ErrorResponse, + parse_obj_as( + type_=ErrorResponse, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index 5a48e4f4..92d826c9 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -27,12 +27,12 @@ from .entity_types import EntityTypes from .error_response import ErrorResponse from .error_response_error import ErrorResponseError -from .error_string import ErrorString from .googlerpc_status import GooglerpcStatus from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction from .reidentify_file_response import ReidentifyFileResponse from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus from .reidentify_string_response import ReidentifyStringResponse from .request_action_type import RequestActionType @@ -45,6 +45,7 @@ from .transformations import Transformations from .transformations_shift_dates import TransformationsShiftDates from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem +from .upload_file_v_2_response import UploadFileV2Response from .uuid_ import Uuid from .v_1_audit_after_options import V1AuditAfterOptions from .v_1_audit_event_response import V1AuditEventResponse @@ -105,12 +106,12 @@ "EntityTypes", "ErrorResponse", "ErrorResponseError", - "ErrorString", "GooglerpcStatus", "ProtobufAny", "RedactionEnumRedaction", "ReidentifyFileResponse", "ReidentifyFileResponseOutput", + 
"ReidentifyFileResponseOutputType", "ReidentifyFileResponseStatus", "ReidentifyStringResponse", "RequestActionType", @@ -123,6 +124,7 @@ "Transformations", "TransformationsShiftDates", "TransformationsShiftDatesEntityTypesItem", + "UploadFileV2Response", "Uuid", "V1AuditAfterOptions", "V1AuditEventResponse", diff --git a/skyflow/generated/rest/types/deidentify_status_response.py b/skyflow/generated/rest/types/deidentify_status_response.py index a276963c..712a85b2 100644 --- a/skyflow/generated/rest/types/deidentify_status_response.py +++ b/skyflow/generated/rest/types/deidentify_status_response.py @@ -24,7 +24,7 @@ class DeidentifyStatusResponse(UniversalBaseModel): How the input file was specified. """ - output_type: typing.Optional[DeidentifyStatusResponseOutputType] = pydantic.Field(default=None) + output_type: DeidentifyStatusResponseOutputType = pydantic.Field() """ How the output file is specified. """ @@ -49,7 +49,7 @@ class DeidentifyStatusResponse(UniversalBaseModel): Size of the processed text in kilobytes (KB). """ - duration: typing.Optional[int] = pydantic.Field(default=None) + duration: typing.Optional[float] = pydantic.Field(default=None) """ Duration of the processed audio in seconds. """ diff --git a/skyflow/generated/rest/types/deidentify_status_response_output_type.py b/skyflow/generated/rest/types/deidentify_status_response_output_type.py index 571801c1..051cc31a 100644 --- a/skyflow/generated/rest/types/deidentify_status_response_output_type.py +++ b/skyflow/generated/rest/types/deidentify_status_response_output_type.py @@ -2,4 +2,4 @@ import typing -DeidentifyStatusResponseOutputType = typing.Union[typing.Literal["base64", "efs_path"], typing.Any] +DeidentifyStatusResponseOutputType = typing.Union[typing.Literal["BASE64", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/deidentify_status_response_status.py b/skyflow/generated/rest/types/deidentify_status_response_status.py index 40262092..9ec2931b 100644 --- a/skyflow/generated/rest/types/deidentify_status_response_status.py +++ b/skyflow/generated/rest/types/deidentify_status_response_status.py @@ -2,4 +2,4 @@ import typing -DeidentifyStatusResponseStatus = typing.Union[typing.Literal["failed", "in_progress", "success"], typing.Any] +DeidentifyStatusResponseStatus = typing.Union[typing.Literal["FAILED", "IN_PROGRESS", "SUCCESS", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/entity_type.py b/skyflow/generated/rest/types/entity_type.py index 20195417..1a343410 100644 --- a/skyflow/generated/rest/types/entity_type.py +++ b/skyflow/generated/rest/types/entity_type.py @@ -15,8 +15,8 @@ "credit_card_expiration", "cvv", "date", - "day", "date_interval", + "day", "dob", "dose", "driver_license", @@ -58,10 +58,10 @@ "passport_number", "password", "phone_number", - "project", "physical_attribute", "political_affiliation", "product", + "project", "religion", "routing_number", "sexuality", diff --git a/skyflow/generated/rest/types/error_string.py b/skyflow/generated/rest/types/error_string.py deleted file mode 100644 index 068b4a84..00000000 --- a/skyflow/generated/rest/types/error_string.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -ErrorString = str diff --git a/skyflow/generated/rest/types/reidentify_file_response.py b/skyflow/generated/rest/types/reidentify_file_response.py index c67b41ac..bd90fb49 100644 --- a/skyflow/generated/rest/types/reidentify_file_response.py +++ b/skyflow/generated/rest/types/reidentify_file_response.py @@ -5,6 +5,7 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus @@ -18,7 +19,7 @@ class ReidentifyFileResponse(UniversalBaseModel): Status of the re-identify operation. """ - output_type: typing.Literal["BASE64"] = pydantic.Field(default="BASE64") + output_type: ReidentifyFileResponseOutputType = pydantic.Field() """ Format of the output file. """ diff --git a/skyflow/generated/rest/types/reidentify_file_response_output_type.py b/skyflow/generated/rest/types/reidentify_file_response_output_type.py new file mode 100644 index 00000000..03048c85 --- /dev/null +++ b/skyflow/generated/rest/types/reidentify_file_response_output_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ReidentifyFileResponseOutputType = typing.Union[typing.Literal["BASE64", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/reidentify_file_response_status.py b/skyflow/generated/rest/types/reidentify_file_response_status.py index c640c3a6..8bdfa1e0 100644 --- a/skyflow/generated/rest/types/reidentify_file_response_status.py +++ b/skyflow/generated/rest/types/reidentify_file_response_status.py @@ -2,4 +2,4 @@ import typing -ReidentifyFileResponseStatus = typing.Union[typing.Literal["failed", "in_progress", "success"], typing.Any] +ReidentifyFileResponseStatus = typing.Union[typing.Literal["FAILED", "IN_PROGRESS", "SUCCESS", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/reidentify_string_response.py b/skyflow/generated/rest/types/reidentify_string_response.py index 8284806b..cbb1b836 100644 --- a/skyflow/generated/rest/types/reidentify_string_response.py +++ b/skyflow/generated/rest/types/reidentify_string_response.py @@ -11,7 +11,7 @@ class ReidentifyStringResponse(UniversalBaseModel): Re-identify string response. """ - processed_text: typing.Optional[str] = pydantic.Field(default=None) + text: typing.Optional[str] = pydantic.Field(default=None) """ Re-identified text. """ diff --git a/skyflow/generated/rest/types/upload_file_v_2_response.py b/skyflow/generated/rest/types/upload_file_v_2_response.py new file mode 100644 index 00000000..f1bcc215 --- /dev/null +++ b/skyflow/generated/rest/types/upload_file_v_2_response.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata + + +class UploadFileV2Response(UniversalBaseModel): + """ + Response schema for uploading a file, optionally creating a new record. + """ + + skyflow_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="skyflowID")] = pydantic.Field( + default=None + ) + """ + Skyflow ID of the record the file was uploaded to. 
+ """ + + file_metadata: typing_extensions.Annotated[ + typing.Optional[typing.Optional[typing.Any]], FieldMetadata(alias="fileMetadata") + ] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index 460ca29e..8401aeb6 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -100,6 +100,8 @@ class Error(Enum): INVALID_TABLE_VALUE = f"{error_prefix} Validation error. Invalid type of table. Specify table as a string" EMPTY_RECORD_IDS_IN_DELETE = f"{error_prefix} Validation error. 'record ids' array can't be empty. Specify one or more record ids." BULK_DELETE_FAILURE = f"{error_prefix} Delete operation failed." + EMPTY_SKYFLOW_ID= f"{error_prefix} Validation error. skyflow_id can't be empty." + INVALID_FILE_COLUMN_NAME= f"{error_prefix} Validation error. 'column_name' can't be empty." INVALID_QUERY_TYPE = f"{error_prefix} Validation error. Query parameter is of type {{}}. Specify as a string." EMPTY_QUERY = f"{error_prefix} Validation error. Query parameter can't be empty. Specify as a string." @@ -198,6 +200,7 @@ class Error(Enum): INVALID_FILE_OR_ENCODED_FILE= f"{error_prefix} . Error while decoding base64 and saving file" INVALID_FILE_TYPE = f"{error_prefix} Validation error. Invalid file type. Specify a valid file type." INVALID_FILE_NAME= f"{error_prefix} Validation error. Invalid file name. Specify a valid file name." + INVALID_FILE_PATH= f"{error_prefix} Validation error. Invalid file path. Specify a valid file path." INVALID_DEIDENTIFY_FILE_PATH= f"{error_prefix} Validation error. Invalid file path. Specify a valid file path." INVALID_BASE64_HEADER= f"{error_prefix} Validation error. Invalid base64 header. Specify a valid base64 header." INVALID_WAIT_TIME= f"{error_prefix} Validation error. Invalid wait time. Specify a valid wait time as number and should not be greater than 64 secs." @@ -271,6 +274,12 @@ class Info(Enum): TOKENIZE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Tokenize request resolved." TOKENIZE_SUCCESS = f"{INFO}: [{error_prefix}] Data tokenized." + FILE_UPLOAD_TRIGGERED = f"{INFO}: [{error_prefix}] File upload method triggered." + VALIDATING_FILE_UPLOAD_REQUEST = f"{INFO}: [{error_prefix}] Validating file upload request." + FILE_UPLOAD_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] File upload request resolved." + FILE_UPLOAD_SUCCESS = f"{INFO}: [{error_prefix}] File uploaded successfully." + FILE_UPLOAD_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] File upload failed." + INVOKE_CONNECTION_TRIGGERED = f"{INFO}: [{error_prefix}] Invoke connection method triggered." VALIDATING_INVOKE_CONNECTION_REQUEST = f"{INFO}: [{error_prefix}] Validating invoke connection request." INVOKE_CONNECTION_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Invoke connection request resolved." 
diff --git a/skyflow/utils/validations/__init__.py b/skyflow/utils/validations/__init__.py index b8ce13c8..2f0bc710 100644 --- a/skyflow/utils/validations/__init__.py +++ b/skyflow/utils/validations/__init__.py @@ -12,6 +12,7 @@ validate_update_request, validate_detokenize_request, validate_tokenize_request, + validate_file_upload_request, validate_invoke_connection_params, validate_deidentify_text_request, validate_reidentify_text_request, diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index bbca6e85..f88388ad 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -1,3 +1,4 @@ +import base64 import json import os from skyflow.generated.rest import TokenType @@ -692,6 +693,69 @@ def validate_tokenize_request(logger, request): log_error_log(SkyflowMessages.ErrorLogs.EMPTY_COLUMN_GROUP_IN_COLUMN_VALUES.value.format("TOKENIZE"), logger = logger) raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETER_COLUMN_GROUP.value.format(i), invalid_input_error_code) + +def validate_file_upload_request(logger, request): + if request is None: + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code) + + # Table + table = getattr(request, "table", None) + if table is None: + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code) + elif table.strip() == "": + raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code) + + # Skyflow ID + skyflow_id = getattr(request, "skyflow_id", None) + if skyflow_id is None: + raise SkyflowError(SkyflowMessages.Error.IDS_KEY_ERROR.value, invalid_input_error_code) + elif skyflow_id.strip() == "": + raise SkyflowError(SkyflowMessages.Error.EMPTY_SKYFLOW_ID.value.format("FILE_UPLOAD"), invalid_input_error_code) + + # Column Name + column_name = getattr(request, "column_name", None) + if column_name is None: + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code) + elif column_name.strip() == "": + logger.error("Empty column name in FILE_UPLOAD") + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code) + + # File-related attributes + file_path = getattr(request, "file_path", None) + base64_str = getattr(request, "base64", None) + file_object = getattr(request, "file_object", None) + file_name = getattr(request, "file_name", None) + + # Check file_path first if present + if not is_none_or_empty(file_path): + if not os.path.exists(file_path) or not os.path.isfile(file_path): + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_PATH.value, invalid_input_error_code) + return + + # Check base64 if present + if not is_none_or_empty(base64_str): + if is_none_or_empty(file_name): + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_NAME.value, invalid_input_error_code) + try: + base64.b64decode(base64_str) + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_BASE64_STRING.value, invalid_input_error_code) + return + + # Check file_object if present + if file_object is not None: + try: + file_object.seek(0, 1) + return + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_OBJECT.value, invalid_input_error_code) + + # If none of the above, raise missing file source error + raise SkyflowError(SkyflowMessages.Error.MISSING_FILE_SOURCE.value, invalid_input_error_code) + +def 
is_none_or_empty(value: str) -> bool: + return value is None or (isinstance(value, str) and value.strip() == "") + def validate_invoke_connection_params(logger, query_params, path_params): if not isinstance(path_params, dict): raise SkyflowError(SkyflowMessages.Error.INVALID_PATH_PARAMS.value, invalid_input_error_code) diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index 4602cf87..fe921293 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -1,6 +1,10 @@ +import base64 import json +import os +from typing import Optional from skyflow.generated.rest import V1FieldRecords, V1BatchRecord, V1TokenizeRecordRequest, \ V1DetokenizeRecordRequest +from skyflow.generated.rest.core.file import File from skyflow.utils import SkyflowMessages, parse_insert_response, \ handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, \ parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values, get_metrics @@ -8,8 +12,8 @@ from skyflow.utils.enums import RequestMethod from skyflow.utils.logger import log_info, log_error_log from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ - validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request -from skyflow.vault.data import InsertRequest, UpdateRequest, DeleteRequest, GetRequest, QueryRequest + validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request, validate_file_upload_request +from skyflow.vault.data import InsertRequest, UpdateRequest, DeleteRequest, GetRequest, QueryRequest, FileUploadRequest, FileUploadResponse from skyflow.vault.tokens import DetokenizeRequest, TokenizeRequest class Vault: @@ -62,7 +66,27 @@ def __build_insert_body(self, request: InsertRequest): else: records_list = self.__build_bulk_field_records(request.values, request.tokens) return records_list + + def __get_file_for_file_upload(self, request: FileUploadRequest) -> Optional[File]: + if request.file_path: + if not request.file_name: + request.file_name = os.path.basename(request.file_path) + with open(request.file_path, "rb") as f: + file_bytes = f.read() + return (request.file_name, file_bytes) + + elif request.base64 and request.file_name: + decoded_bytes = base64.b64decode(request.base64) + return (request.file_name, decoded_bytes) + + elif request.file_object is not None: + if hasattr(request.file_object, "name") and request.file_object.name: + file_name = os.path.basename(request.file_object.name) + return (file_name, request.file_object) + + return None + def __get_headers(self): headers = { SKY_META_DATA_HEADER: json.dumps(get_metrics()) @@ -244,4 +268,31 @@ def tokenize(self, request: TokenizeRequest): return tokenize_response except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) - handle_exception(e, self.__vault_client.get_logger()) \ No newline at end of file + handle_exception(e, self.__vault_client.get_logger()) + + def upload_file(self, request: FileUploadRequest): + log_info(SkyflowMessages.Info.FILE_UPLOAD_TRIGGERED.value, self.__vault_client.get_logger()) + log_info(SkyflowMessages.Info.VALIDATING_FILE_UPLOAD_REQUEST.value, self.__vault_client.get_logger()) + validate_file_upload_request(self.__vault_client.get_logger(), request) + self.__initialize() + file_upload_api = 
self.__vault_client.get_records_api().with_raw_response + try: + api_response = file_upload_api.upload_file_v_2( + self.__vault_client.get_vault_id(), + table_name=request.table, + column_name=request.column_name, + file=self.__get_file_for_file_upload(request), + skyflow_id=request.skyflow_id, + return_file_metadata= False, + request_options=self.__get_headers() + ) + log_info(SkyflowMessages.Info.FILE_UPLOAD_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + log_info(SkyflowMessages.Info.FILE_UPLOAD_SUCCESS.value, self.__vault_client.get_logger()) + upload_response = FileUploadResponse( + skyflow_id=api_response.data.skyflow_id, + errors=None + ) + return upload_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.FILE_UPLOAD_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) diff --git a/skyflow/vault/data/__init__.py b/skyflow/vault/data/__init__.py index b43b23cf..d711f4f6 100644 --- a/skyflow/vault/data/__init__.py +++ b/skyflow/vault/data/__init__.py @@ -8,4 +8,6 @@ from ._update_response import UpdateResponse from ._upload_file_request import UploadFileRequest from ._query_request import QueryRequest -from ._query_response import QueryResponse \ No newline at end of file +from ._query_response import QueryResponse +from ._file_upload_request import FileUploadRequest +from ._file_upload_response import FileUploadResponse \ No newline at end of file diff --git a/skyflow/vault/data/_file_upload_request.py b/skyflow/vault/data/_file_upload_request.py new file mode 100644 index 00000000..d1bd4a44 --- /dev/null +++ b/skyflow/vault/data/_file_upload_request.py @@ -0,0 +1,18 @@ +from typing import BinaryIO + +class FileUploadRequest: + def __init__(self, + table: str, + skyflow_id: str, + column_name: str, + file_path: str= None, + base64: str= None, + file_object: BinaryIO= None, + file_name: str= None): + self.table = table + self.skyflow_id = skyflow_id + self.column_name = column_name + self.file_path = file_path + self.base64 = base64 + self.file_object = file_object + self.file_name = file_name diff --git a/skyflow/vault/data/_file_upload_response.py b/skyflow/vault/data/_file_upload_response.py new file mode 100644 index 00000000..18218f08 --- /dev/null +++ b/skyflow/vault/data/_file_upload_response.py @@ -0,0 +1,6 @@ +class FileUploadResponse: + def __init__(self, + skyflow_id, + errors): + self.skyflow_id = skyflow_id + self.errors = errors diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 0c8a7743..8d1d1ab0 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -1,12 +1,14 @@ import unittest -from unittest.mock import Mock, patch +from unittest.mock import Mock, patch, mock_open as mock_open_func, mock_open from skyflow.generated.rest import V1BatchRecord, V1FieldRecords, V1DetokenizeRecordRequest, V1TokenizeRecordRequest +from skyflow.utils._skyflow_messages import SkyflowMessages from skyflow.utils.enums import RedactionType, TokenMode from skyflow.vault.controller import Vault from skyflow.vault.data import InsertRequest, InsertResponse, UpdateResponse, UpdateRequest, DeleteResponse, \ - DeleteRequest, GetRequest, GetResponse, QueryRequest, QueryResponse + DeleteRequest, GetRequest, GetResponse, QueryRequest, QueryResponse, FileUploadRequest from skyflow.vault.tokens import DetokenizeRequest, DetokenizeResponse, TokenizeResponse, TokenizeRequest - +from skyflow.error import SkyflowError 
+from skyflow.utils.validations import validate_file_upload_request VAULT_ID = "test_vault_id" TABLE_NAME = "test_table" @@ -598,3 +600,277 @@ def test_tokenize_handles_generic_error(self, mock_validate): self.vault.tokenize(request) tokens_api.record_service_tokenize.assert_called_once() + + @patch("skyflow.vault.controller._vault.validate_file_upload_request") + def test_upload_file_with_file_path_successful(self, mock_validate): + """Test upload_file functionality using file path.""" + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_path="/path/to/test.txt", + ) + + # Mock file open + mocked_open = mock_open_func(read_data=b"test file content") + + # Mock API response + mock_api_response = Mock() + mock_api_response.data = Mock(skyflow_id="123") + + records_api = self.vault_client.get_records_api.return_value + records_api.with_raw_response.upload_file_v_2.return_value = mock_api_response + + with patch('builtins.open', mocked_open): + result = self.vault.upload_file(request) + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + mocked_open.assert_called_once_with("/path/to/test.txt", "rb") + self.assertEqual(result.skyflow_id, "123") + self.assertIsNone(result.errors) + + @patch("skyflow.vault.controller._vault.validate_file_upload_request") + def test_upload_file_with_base64_successful(self, mock_validate): + """Test upload_file functionality using base64 content.""" + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + base64="dGVzdCBmaWxlIGNvbnRlbnQ=", # "test file content" in base64 + file_name="test.txt" + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.data = Mock(skyflow_id="123") + + records_api = self.vault_client.get_records_api.return_value + records_api.with_raw_response.upload_file_v_2.return_value = mock_api_response + + # Call upload_file + result = self.vault.upload_file(request) + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + self.assertEqual(result.skyflow_id, "123") + self.assertIsNone(result.errors) + + @patch("skyflow.vault.controller._vault.validate_file_upload_request") + def test_upload_file_with_file_object_successful(self, mock_validate): + """Test upload_file functionality using file object.""" + + # Create mock file object + mock_file = Mock() + mock_file.name = "test.txt" + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_object=mock_file + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.data = Mock(skyflow_id="123") + + records_api = self.vault_client.get_records_api.return_value + records_api.with_raw_response.upload_file_v_2.return_value = mock_api_response + + # Call upload_file + result = self.vault.upload_file(request) + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + self.assertEqual(result.skyflow_id, "123") + self.assertIsNone(result.errors) + + @patch("skyflow.vault.controller._vault.validate_file_upload_request") + def test_upload_file_handles_api_error(self, mock_validate): + """Test upload_file error handling for API errors.""" + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_path="/path/to/test.txt" + ) + + # Mock API error + records_api = self.vault_client.get_records_api.return_value + records_api.with_raw_response.upload_file_v_2.side_effect = Exception("Upload 
failed") + + # Assert that the exception is propagated + with patch('builtins.open', mock_open(read_data=b"test content")): + with self.assertRaises(Exception): + self.vault.upload_file(request) + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + + @patch("skyflow.vault.controller._vault.validate_file_upload_request") + def test_upload_file_with_missing_file_source(self, mock_validate): + """Test upload_file with no file source specified.""" + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123" + ) + + mock_validate.side_effect = SkyflowError(SkyflowMessages.Error.MISSING_FILE_SOURCE.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + with self.assertRaises(SkyflowError) as error: + self.vault.upload_file(request) + + self.assertEqual(error.exception.message, SkyflowMessages.Error.MISSING_FILE_SOURCE.value) + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + +class TestFileUploadValidation(unittest.TestCase): + def setUp(self): + self.logger = Mock() + + def test_validate_invalid_table(self): + """Test validation fails when table is empty""" + request = FileUploadRequest( + table="", + column_name="file_column", + skyflow_id="123", + file_path="/path/to/file.txt" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.EMPTY_TABLE_VALUE.value) + + def test_validate_empty_skyflow_id(self): + """Test validation fails when skyflow_id is empty""" + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="", + file_path="/path/to/file.txt" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, + SkyflowMessages.Error.EMPTY_SKYFLOW_ID.value.format("FILE_UPLOAD")) + + def test_validate_invalid_column_name(self): + """Test validation fails when column_name is missing""" + request = FileUploadRequest( + table="test_table", + skyflow_id="123", + column_name="", + file_path="/path/to/file.txt" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, + SkyflowMessages.Error.INVALID_FILE_COLUMN_NAME.value.format("FILE_UPLOAD")) + + + @patch('os.path.exists') + @patch('os.path.isfile') + def test_validate_file_path_success(self, mock_isfile, mock_exists): + """Test validation succeeds with valid file path""" + mock_exists.return_value = True + mock_isfile.return_value = True + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_path="/path/to/file.txt" + ) + validate_file_upload_request(self.logger, request) + mock_exists.assert_called_once_with("/path/to/file.txt") + mock_isfile.assert_called_once_with("/path/to/file.txt") + + @patch('os.path.exists') + def test_validate_invalid_file_path(self, mock_exists): + """Test validation fails with invalid file path""" + mock_exists.return_value = False + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_path="/invalid/path.txt" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.INVALID_FILE_PATH.value) + + def test_validate_base64_success(self): + """Test validation succeeds 
with valid base64""" + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + base64="dGVzdCBmaWxlIGNvbnRlbnQ=", + file_name="test.txt" + ) + validate_file_upload_request(self.logger, request) + + def test_validate_base64_without_filename(self): + """Test validation fails with base64 but no filename""" + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + base64="dGVzdCBmaWxlIGNvbnRlbnQ=" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.INVALID_FILE_NAME.value) + + def test_validate_invalid_base64(self): + """Test validation fails with invalid base64""" + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + base64="invalid-base64", + file_name="test.txt" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.INVALID_BASE64_STRING.value) + + def test_validate_file_object_success(self): + """Test validation succeeds with valid file object""" + mock_file = Mock() + mock_file.seek = Mock() # Add seek method + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_object=mock_file + ) + validate_file_upload_request(self.logger, request) + + def test_validate_invalid_file_object(self): + """Test validation fails with invalid file object""" + mock_file = Mock() + mock_file.seek = Mock(side_effect=Exception()) # Make seek fail + + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123", + file_object=mock_file + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.INVALID_FILE_OBJECT.value) + + def test_validate_missing_file_source(self): + """Test validation fails when no file source is provided""" + request = FileUploadRequest( + table="test_table", + column_name="file_column", + skyflow_id="123" + ) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.MISSING_FILE_SOURCE.value) + with self.assertRaises(SkyflowError) as error: + validate_file_upload_request(self.logger, request) + self.assertEqual(error.exception.message, SkyflowMessages.Error.MISSING_FILE_SOURCE.value) From 3cafb5852cb4eb7d78df0b9de3142bc18c7e2c1b Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Wed, 10 Sep 2025 14:10:00 +0000 Subject: [PATCH 41/60] [AUTOMATED] Private Release 2.1.0b1.dev0+61e368f --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index a205e472..b349bede 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.1.0b1' +current_version = '2.1.0b1.dev0+61e368f' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 2e213012..fab22bb1 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.1.0b1' \ No newline at end of file +SDK_VERSION = '2.1.0b1.dev0+61e368f' \ No newline at end of file From 8e1d65351de5918610fddea288c3f468b714a4e6 Mon Sep 17 
00:00:00 2001 From: raushan-skyflow Date: Wed, 17 Sep 2025 16:26:28 +0530 Subject: [PATCH 42/60] SK-2270 fix inconsistencies in python SDK V2 (#193) * SK-2270: fix inconsistencies in python SDK V2 --- README.md | 49 +- samples/detect_api/deidentify_file.py | 4 +- samples/vault_api/insert_byot.py | 2 +- samples/vault_api/insert_records.py | 2 +- skyflow/utils/_skyflow_messages.py | 1 + skyflow/utils/_utils.py | 3 + skyflow/utils/validations/_validations.py | 6 +- skyflow/vault/controller/_detect.py | 57 +- skyflow/vault/controller/_vault.py | 7 +- skyflow/vault/data/_insert_request.py | 4 +- .../vault/detect/_deidentify_file_response.py | 4 +- skyflow/vault/detect/_file_input.py | 6 +- skyflow/vault/tokens/_tokenize_response.py | 5 +- tests/utils/validations/__init__.py | 0 tests/utils/validations/test__validations.py | 1046 +++++++++++++++++ tests/vault/controller/test__detect.py | 19 +- tests/vault/controller/test__vault.py | 8 +- 17 files changed, 1165 insertions(+), 58 deletions(-) create mode 100644 tests/utils/validations/__init__.py create mode 100644 tests/utils/validations/test__validations.py diff --git a/README.md b/README.md index 39a58429..67b0d1c9 100644 --- a/README.md +++ b/README.md @@ -215,7 +215,7 @@ table_name = '' # Replace with your actual table name # Create Insert Request insert_request = InsertRequest( - table_name=table_name, + table=table_name, values=insert_data, return_tokens=True, # Optional: Get tokens for inserted data continue_on_error=True # Optional: Continue on partial errors @@ -273,7 +273,7 @@ options = InsertOptions( ```python insert_request = InsertRequest( - table_name=table_name, # Replace with the table name + table=table_name, # Replace with the table name values=insert_data, return_tokens=False, # Do not return tokens continue_on_error=False, # Stop inserting if any record fails @@ -474,7 +474,7 @@ try: # Step 2: Create Insert Request insert_request = InsertRequest( - table_name='table1', # Specify the table in the vault where the data will be inserted + table='table1', # Specify the table in the vault where the data will be inserted values=insert_data, # Attach the data (records) to be inserted return_tokens=True, # Specify if tokens should be returned upon successful insertion continue_on_error=True # Optional: Continue on partial errors @@ -551,7 +551,7 @@ try: # Step 2: Build an InsertRequest object with the table name and the data to insert insert_request = InsertRequest( - table_name='', # Replace with the actual table name in your Skyflow vault + table='', # Replace with the actual table name in your Skyflow vault values=insert_data, # Attach the data to be inserted ) @@ -608,7 +608,7 @@ try: # Step 4: Build the InsertRequest object with the data records to insert insert_request = InsertRequest( - table_name='table1', # Specify the table in the vault where the data will be inserted + table='table1', # Specify the table in the vault where the data will be inserted values=insert_data, # Attach the data (records) to be inserted return_tokens=True, # Specify if tokens should be returned upon successful insertion continue_on_error=True # Specify to continue inserting records even if an error occurs for some records @@ -686,7 +686,7 @@ try: # Step 3: Build the InsertRequest object with the upsertData insert_request = InsertRequest( - table_name='table1', # Specify the table in the vault where the data will be inserted + table='table1', # Specify the table in the vault where the data will be inserted values=insert_data, # Attach the data 
(records) to be inserted return_tokens=True, # Specify if tokens should be returned upon successful insertion upsert='cardholder_name' # Specify the field to be used for upsert operations (e.g., cardholder_name) @@ -1897,23 +1897,24 @@ ReidentifyTextResponse( ``` ### Deidentify File -To deidentify files, use the `deidentify_file` method. The `DeidentifyFileRequest` class creates a deidentify file request, which includes the file to be deidentified and various configuration options. +To deidentify files, use the `deidentify_file` method. The `DeidentifyFileRequest` class creates a deidentify file request, supports providing either a file or a file path in class FileInput for de-identification, along with various configuration options. #### Construct a Deidentify File request ```python from skyflow.error import SkyflowError from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions -from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, Bleep +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, Bleep, FileInput """ This example demonstrates how to deidentify file, along with corresponding DeidentifyFileRequest schema. """ try: # Initialize Skyflow client # Step 1: Open file for deidentification - file = open('', 'rb') # Open the file in read-binary mode + file_path="" + file = open(file_path, 'rb') # Open the file in read-binary mode # Step 2: Create deidentify file request request = DeidentifyFileRequest( - file=file, # File object to deidentify + file=FileInput(file), # File to de-identify (can also provide a file path) entities=[DetectEntities.SSN, DetectEntities.CREDIT_CARD], # Entities to detect # Token format configuration @@ -1971,7 +1972,7 @@ except Exception as error: ```python from skyflow.error import SkyflowError from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions -from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Bleep +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Bleep, FileInput """ * Skyflow Deidentify File Example * @@ -1985,7 +1986,7 @@ try: file = open('sensitive_document.txt', 'rb') # Open the file in read-binary mode # Step 2: Create deidentify file request request = DeidentifyFileRequest( - file=file, # File object to deidentify + file=FileInput(file), # File to de-identify (can also provide a file path) entities=[ DetectEntities.SSN, DetectEntities.CREDIT_CARD @@ -2038,7 +2039,6 @@ DeidentifyFileResponse( ], run_id='83abcdef-2b61-4a83-a4e0-cbc71ffabffd', status='SUCCESS', - errors=[] ) ``` @@ -2121,7 +2121,7 @@ except Exception as error: print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` -Sample Response +Sample Response: ```python DeidentifyFileResponse( file='TXkgY2FyZCBudW1iZXIgaXMgW0NSRURJVF9DQVJEXQpteSBzZWNvbmQ…', # Base64 encoded file content @@ -2142,7 +2142,26 @@ DeidentifyFileResponse( ], run_id='48ec05ba-96ec-4641-a8e2-35e066afef95', status='SUCCESS', - errors=[] +) +``` + +Incase of invalid/expired RunId: + +```python +DeidentifyFileResponse( + file_base64=None, + file=None, + type='UNKNOWN', + extension=None, + word_count=None, + char_count=None, + size_in_kb=0.0, + duration_in_seconds=None, + page_count=None, + slide_count=None, + entities=[], + run_id='1e9f321f-dd51-4ab1-a014-21212fsdfsd', + status='UNKNOWN' ) ``` diff --git a/samples/detect_api/deidentify_file.py b/samples/detect_api/deidentify_file.py index c9877d58..99b4b26e 100644 --- 
a/samples/detect_api/deidentify_file.py +++ b/samples/detect_api/deidentify_file.py @@ -1,7 +1,7 @@ from skyflow.error import SkyflowError from skyflow import Env, Skyflow, LogLevel from skyflow.utils.enums import DetectEntities, MaskingMethod, DetectOutputTranscriptions -from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, DateTransformation, Bleep +from skyflow.vault.detect import DeidentifyFileRequest, TokenFormat, Transformations, DateTransformation, Bleep, FileInput """ * Skyflow Deidentify File Example @@ -39,7 +39,7 @@ def perform_file_deidentification(): file = open(file_path, 'rb') # Step 5: Configure Deidentify File Request with all options deidentify_request = DeidentifyFileRequest( - file=file, # File object to deidentify + file=FileInput(file), # File to de-identify (can also provide a file path) entities=[DetectEntities.SSN, DetectEntities.CREDIT_CARD], # Entities to detect allow_regex_list=[''], # Optional: Patterns to allow restrict_regex_list=[''], # Optional: Patterns to restrict diff --git a/samples/vault_api/insert_byot.py b/samples/vault_api/insert_byot.py index ae4c1eae..5161f886 100644 --- a/samples/vault_api/insert_byot.py +++ b/samples/vault_api/insert_byot.py @@ -70,7 +70,7 @@ def perform_secure_data_insertion_with_byot(): ] insert_request = InsertRequest( - table_name=table_name, + table=table_name, values=insert_data, token_mode=TokenMode.ENABLE, # Enable Bring Your Own Token (BYOT) tokens=tokens, # Specify tokens to use for BYOT diff --git a/samples/vault_api/insert_records.py b/samples/vault_api/insert_records.py index 32ec1fae..76ec2259 100644 --- a/samples/vault_api/insert_records.py +++ b/samples/vault_api/insert_records.py @@ -47,7 +47,7 @@ def perform_secure_data_insertion(): # Step 5: Create Insert Request insert_request = InsertRequest( - table_name=table_name, + table=table_name, values=insert_data, return_tokens=True, # Optional: Get tokens for inserted data continue_on_error=True # Optional: Continue on partial errors diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py index 8401aeb6..3672cfa8 100644 --- a/skyflow/utils/_skyflow_messages.py +++ b/skyflow/utils/_skyflow_messages.py @@ -383,6 +383,7 @@ class ErrorLogs(Enum): DEIDENTIFY_FILE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify file resulted in failure." DETECT_RUN_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Detect get run resulted in failure." DEIDENTIFY_TEXT_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify text resulted in failure." + SAVING_DEIDENTIFY_FILE_FAILED = f"{ERROR}: [{error_prefix}] Error while saving deidentified file to output directory." REIDENTIFY_TEXT_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Reidentify text resulted in failure." DETECT_FILE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Deidentify file resulted in failure." 
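For context on the `FileInput` change applied to the sample above: a minimal usage sketch (not part of the patch) showing the two ways a caller can now supply a file for de-identification, assuming `DeidentifyFileRequest`'s other options (entities, token format, transformations) keep their defaults as in the README examples.

```python
from skyflow.vault.detect import DeidentifyFileRequest, FileInput

# Option 1: pass an already-open binary file object, as in the updated sample.
file_obj = open('sensitive_document.txt', 'rb')
request_from_object = DeidentifyFileRequest(file=FileInput(file=file_obj))

# Option 2: pass only a path and let the SDK open the file itself.
request_from_path = DeidentifyFileRequest(file=FileInput(file_path='sensitive_document.txt'))
```

Per the `validate_file_from_request` behavior exercised in the tests further down, exactly one of `file` or `file_path` should be set on `FileInput`; providing both or neither is rejected with `INVALID_DEIDENTIFY_FILE_INPUT`.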
diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 77ffe580..114079b5 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -447,3 +447,6 @@ def encode_column_values(get_request): encoded_column_values.append(quote(column)) return encoded_column_values + +def get_attribute(obj, camel_case, snake_case): + return getattr(obj, camel_case, None) or getattr(obj, snake_case, None) diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py index f88388ad..4428d11e 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -277,7 +277,7 @@ def validate_file_from_request(file_input: FileInput): raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_TYPE.value, invalid_input_error_code) # Validate file name - file_name = os.path.splitext(file.name)[0] + file_name, _ = os.path.splitext(os.path.basename(file.name)) if not file_name or not file_name.strip(): raise SkyflowError(SkyflowMessages.Error.INVALID_FILE_NAME.value, invalid_input_error_code) @@ -394,10 +394,10 @@ def validate_deidentify_file_request(logger, request: DeidentifyFileRequest): raise SkyflowError(SkyflowMessages.Error.WAIT_TIME_GREATER_THEN_64.value, invalid_input_error_code) def validate_insert_request(logger, request): - if not isinstance(request.table_name, str): + if not isinstance(request.table, str): log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("INSERT"), logger = logger) raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_NAME_IN_INSERT.value, invalid_input_error_code) - if not request.table_name.strip(): + if not request.table.strip(): log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("INSERT"), logger = logger) raise SkyflowError(SkyflowMessages.Error.MISSING_TABLE_NAME_IN_INSERT.value, invalid_input_error_code) diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py index 93fac69e..62d551c1 100644 --- a/skyflow/vault/controller/_detect.py +++ b/skyflow/vault/controller/_detect.py @@ -6,8 +6,9 @@ from skyflow.generated.rest import DeidentifyTextRequestFile, DeidentifyAudioRequestFile, DeidentifyPdfRequestFile, \ DeidentifyImageRequestFile, DeidentifyPresentationRequestFile, DeidentifySpreadsheetRequestFile, \ DeidentifyDocumentRequestFile, DeidentifyFileRequestFile +from skyflow.generated.rest.types.deidentify_status_response import DeidentifyStatusResponse from skyflow.utils._skyflow_messages import SkyflowMessages -from skyflow.utils._utils import get_metrics, handle_exception, parse_deidentify_text_response, parse_reidentify_text_response +from skyflow.utils._utils import get_attribute, get_metrics, handle_exception, parse_deidentify_text_response, parse_reidentify_text_response from skyflow.utils.constants import SKY_META_DATA_HEADER from skyflow.utils.logger import log_info, log_error_log from skyflow.utils.validations import validate_deidentify_file_request, validate_get_detect_run_request @@ -83,6 +84,43 @@ def __poll_for_processed_file(self, run_id, max_wait_time=64): except Exception as e: raise e + def __save_deidentify_file_response_output(self, response: DeidentifyStatusResponse, output_directory: str, original_file_name: str, name_without_ext: str): + if not response or not hasattr(response, 'output') or not response.output or not output_directory: + return + + if not os.path.exists(output_directory): + return + + deidentify_file_prefix = "processed-" + output_list = response.output + + base_original_filename = 
os.path.basename(original_file_name) + base_name_without_ext = os.path.splitext(base_original_filename)[0] + + for idx, output in enumerate(output_list): + try: + processed_file = get_attribute(output, 'processedFile', 'processed_file') + processed_file_type = get_attribute(output, 'processedFileType', 'processed_file_type') + processed_file_extension = get_attribute(output, 'processedFileExtension', 'processed_file_extension') + + if not processed_file: + continue + + decoded_data = base64.b64decode(processed_file) + + if idx == 0 or processed_file_type == 'redacted_file': + output_file_name = os.path.join(output_directory, deidentify_file_prefix + base_original_filename) + if processed_file_extension: + output_file_name = os.path.join(output_directory, f"{deidentify_file_prefix}{base_name_without_ext}.{processed_file_extension}") + else: + output_file_name = os.path.join(output_directory, f"{deidentify_file_prefix}{base_name_without_ext}.{processed_file_extension}") + + with open(output_file_name, 'wb') as f: + f.write(decoded_data) + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.SAVING_DEIDENTIFY_FILE_FAILED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + def __parse_deidentify_file_response(self, data, run_id=None, status=None): output = getattr(data, "output", []) status_val = getattr(data, "status", None) or status @@ -141,8 +179,8 @@ def output_to_dict_list(output): return DeidentifyFileResponse( file_base64=base64_string, - file=file_obj, # File class will be instantiated in DeidentifyFileResponse - type=first_output.get("type", None), + file=file_obj, + type=first_output.get("type", "UNKNOWN"), extension=extension, word_count=word_count, char_count=char_count, @@ -153,7 +191,6 @@ def output_to_dict_list(output): entities=entities, run_id=run_id_val, status=status_val, - errors=None ) def __get_token_format(self, request): @@ -396,12 +433,11 @@ def deidentify_file(self, request: DeidentifyFileRequest): run_id = getattr(api_response.data, 'run_id', None) processed_response = self.__poll_for_processed_file(run_id, request.wait_time) - parsed_response = self.__parse_deidentify_file_response(processed_response, run_id) if request.output_directory and processed_response.status == 'SUCCESS': - file_name_only = 'processed-'+os.path.basename(file_name) - output_file_path = f"{request.output_directory}/{file_name_only}" - with open(output_file_path, 'wb') as output_file: - output_file.write(base64.b64decode(parsed_response.file_base64)) + name_without_ext, _ = os.path.splitext(file_name) + self.__save_deidentify_file_response_output(processed_response, request.output_directory, file_name, name_without_ext) + + parsed_response = self.__parse_deidentify_file_response(processed_response, run_id) log_info(SkyflowMessages.Info.DETECT_FILE_SUCCESS.value, self.__vault_client.get_logger()) return parsed_response @@ -411,9 +447,9 @@ def deidentify_file(self, request: DeidentifyFileRequest): handle_exception(e, self.__vault_client.get_logger()) def get_detect_run(self, request: GetDetectRunRequest): + log_info(SkyflowMessages.Info.GET_DETECT_RUN_TRIGGERED.value,self.__vault_client.get_logger()) log_info(SkyflowMessages.Info.VALIDATING_GET_DETECT_RUN_INPUT.value, self.__vault_client.get_logger()) validate_get_detect_run_request(self.__vault_client.get_logger(), request) - log_info(SkyflowMessages.Info.DEIDENTIFY_TEXT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) self.__initialize() files_api = 
self.__vault_client.get_detect_file_api().with_raw_response @@ -428,6 +464,7 @@ def get_detect_run(self, request: GetDetectRunRequest): parsed_response = self.__parse_deidentify_file_response(DeidentifyFileResponse(run_id=run_id, status='IN_PROGRESS')) else: parsed_response = self.__parse_deidentify_file_response(response.data, run_id, response.data.status) + log_info(SkyflowMessages.Info.GET_DETECT_RUN_SUCCESS.value,self.__vault_client.get_logger()) return parsed_response except Exception as e: log_error_log(SkyflowMessages.ErrorLogs.DETECT_FILE_REQUEST_REJECTED.value, diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py index fe921293..7cc9ec77 100644 --- a/skyflow/vault/controller/_vault.py +++ b/skyflow/vault/controller/_vault.py @@ -10,6 +10,7 @@ parse_tokenize_response, parse_query_response, parse_get_response, encode_column_values, get_metrics from skyflow.utils.constants import SKY_META_DATA_HEADER from skyflow.utils.enums import RequestMethod +from skyflow.utils.enums.redaction_type import RedactionType from skyflow.utils.logger import log_info, log_error_log from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request, validate_file_upload_request @@ -57,7 +58,7 @@ def __build_insert_body(self, request: InsertRequest): records_list = self.__build_batch_field_records( request.values, request.tokens, - request.table_name, + request.table, request.return_tokens, request.upsert ) @@ -109,7 +110,7 @@ def insert(self, request: InsertRequest): else: api_response = records_api.record_service_insert_record(self.__vault_client.get_vault_id(), - request.table_name, records=insert_body,tokenization= request.return_tokens, upsert=request.upsert, homogeneous=request.homogeneous, byot=request.token_mode.value, request_options=self.__get_headers()) + request.table, records=insert_body,tokenization= request.return_tokens, upsert=request.upsert, homogeneous=request.homogeneous, byot=request.token_mode.value, request_options=self.__get_headers()) insert_response = parse_insert_response(api_response, request.continue_on_error) log_info(SkyflowMessages.Info.INSERT_SUCCESS.value, self.__vault_client.get_logger()) @@ -225,7 +226,7 @@ def detokenize(self, request: DetokenizeRequest): tokens_list = [ V1DetokenizeRecordRequest( token=item.get('token'), - redaction=item.get('redaction', None) + redaction=item.get('redaction', RedactionType.DEFAULT) ) for item in request.data ] diff --git a/skyflow/vault/data/_insert_request.py b/skyflow/vault/data/_insert_request.py index 742c5120..909edd88 100644 --- a/skyflow/vault/data/_insert_request.py +++ b/skyflow/vault/data/_insert_request.py @@ -2,7 +2,7 @@ class InsertRequest: def __init__(self, - table_name, + table, values, tokens = None, upsert = None, @@ -10,7 +10,7 @@ def __init__(self, token_mode = TokenMode.DISABLE, return_tokens = True, continue_on_error = False): - self.table_name = table_name + self.table = table self.values = values self.tokens = tokens self.upsert = upsert diff --git a/skyflow/vault/detect/_deidentify_file_response.py b/skyflow/vault/detect/_deidentify_file_response.py index 90a0d493..b340e21c 100644 --- a/skyflow/vault/detect/_deidentify_file_response.py +++ b/skyflow/vault/detect/_deidentify_file_response.py @@ -17,7 +17,6 @@ def __init__( entities: list = None, # list of dicts with keys 'file' and 'extension' run_id: str = None, status: str = None, 
- errors: list = None, ): self.file_base64 = file_base64 self.file = File(file) if file else None @@ -32,7 +31,6 @@ def __init__( self.entities = entities if entities is not None else [] self.run_id = run_id self.status = status - self.errors = errors def __repr__(self): return ( @@ -42,7 +40,7 @@ def __repr__(self): f"char_count={self.char_count!r}, size_in_kb={self.size_in_kb!r}, " f"duration_in_seconds={self.duration_in_seconds!r}, page_count={self.page_count!r}, " f"slide_count={self.slide_count!r}, entities={self.entities!r}, " - f"run_id={self.run_id!r}, status={self.status!r}, errors={self.errors!r})" + f"run_id={self.run_id!r}, status={self.status!r})" ) def __str__(self): diff --git a/skyflow/vault/detect/_file_input.py b/skyflow/vault/detect/_file_input.py index 472ca0e2..6b8bc2fb 100644 --- a/skyflow/vault/detect/_file_input.py +++ b/skyflow/vault/detect/_file_input.py @@ -1,13 +1,15 @@ +from io import BufferedReader + class FileInput: """ Represents a file input for the vault detection process. Attributes: - file (str): The file object to be processed. This can be a file-like object or a binary string. + file (BufferedReader): The file object to be processed. This can be a file-like object or a binary string. file_path (str): The path to the file to be processed. """ - def __init__(self, file: str= None, file_path: str = None): + def __init__(self, file: BufferedReader= None, file_path: str = None): self.file = file self.file_path = file_path diff --git a/skyflow/vault/tokens/_tokenize_response.py b/skyflow/vault/tokens/_tokenize_response.py index 264b3987..598c2a1c 100644 --- a/skyflow/vault/tokens/_tokenize_response.py +++ b/skyflow/vault/tokens/_tokenize_response.py @@ -1,10 +1,11 @@ class TokenizeResponse: - def __init__(self, tokenized_fields = None): + def __init__(self, tokenized_fields = None, errors = None): self.tokenized_fields = tokenized_fields + self.errors = errors def __repr__(self): - return f"TokenizeResponse(tokenized_fields={self.tokenized_fields})" + return f"TokenizeResponse(tokenized_fields={self.tokenized_fields}, errors={self.errors})" def __str__(self): return self.__repr__() diff --git a/tests/utils/validations/__init__.py b/tests/utils/validations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils/validations/test__validations.py b/tests/utils/validations/test__validations.py new file mode 100644 index 00000000..48332a55 --- /dev/null +++ b/tests/utils/validations/test__validations.py @@ -0,0 +1,1046 @@ +import unittest +from unittest.mock import Mock, patch, MagicMock +import tempfile +import os + +from skyflow.error import SkyflowError +from skyflow.utils.validations._validations import ( + validate_required_field, validate_api_key, validate_credentials, + validate_log_level, validate_keys, validate_vault_config, + validate_update_vault_config, validate_connection_config, + validate_update_connection_config, validate_file_from_request, + validate_insert_request, validate_delete_request, validate_query_request, + validate_get_detect_run_request, validate_get_request, validate_update_request, + validate_detokenize_request, validate_tokenize_request, validate_invoke_connection_params, + validate_deidentify_text_request, validate_reidentify_text_request, validate_deidentify_file_request +) +from skyflow.utils import SkyflowMessages +from skyflow.utils.enums import DetectEntities, RedactionType +from skyflow.vault.data import GetRequest, UpdateRequest +from skyflow.vault.detect import DeidentifyTextRequest, 
Transformations, DateTransformation, ReidentifyTextRequest, \ + FileInput, DeidentifyFileRequest +from skyflow.vault.tokens import DetokenizeRequest +from skyflow.vault.connection._invoke_connection_request import InvokeConnectionRequest + +class TestValidations(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.temp_file = tempfile.NamedTemporaryFile(delete=False) + cls.temp_file.write(b"test content") + cls.temp_file.close() + cls.temp_file_path = cls.temp_file.name + cls.temp_dir = tempfile.TemporaryDirectory() + cls.temp_dir_path = cls.temp_dir.name + + @classmethod + def tearDownClass(cls): + if os.path.exists(cls.temp_file_path): + os.unlink(cls.temp_file_path) + cls.temp_dir.cleanup() + + def setUp(self): + self.logger = Mock() + + def test_validate_required_field_valid(self): + config = {"test_field": "test_value"} + validate_required_field( + self.logger, + config, + "test_field", + str, + "Empty error", + "Invalid error" + ) + + def test_validate_required_field_missing(self): + config = {} + with self.assertRaises(SkyflowError) as context: + validate_required_field( + self.logger, + config, + "vault_id", + str, + "Empty error", + "Invalid error" + ) + self.assertEqual(context.exception.message, "Invalid error") + + def test_validate_required_field_empty_string(self): + config = {"test_field": ""} + with self.assertRaises(SkyflowError) as context: + validate_required_field( + self.logger, + config, + "test_field", + str, + "Empty error", + "Invalid error" + ) + self.assertEqual(context.exception.message, "Empty error") + + def test_validate_required_field_wrong_type(self): + config = {"test_field": 123} + with self.assertRaises(SkyflowError) as context: + validate_required_field( + self.logger, + config, + "test_field", + str, + "Empty error", + "Invalid error" + ) + self.assertEqual(context.exception.message, "Invalid error") + + def test_validate_api_key_valid(self): + valid_key = "sky-abc12-1234567890abcdef1234567890abcdef" + self.assertTrue(validate_api_key(valid_key, self.logger)) + + def test_validate_api_key_invalid_prefix(self): + invalid_key = "invalid-abc12-1234567890abcdef1234567890abcdef" + self.assertFalse(validate_api_key(invalid_key, self.logger)) + + def test_validate_api_key_invalid_length(self): + invalid_key = "sky-abc12-123456" + self.assertFalse(validate_api_key(invalid_key, self.logger)) + + def test_validate_credentials_with_api_key(self): + credentials = { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + validate_credentials(self.logger, credentials) + + def test_validate_credentials_with_expired_token(self): + credentials = { + "token": "expired_token" + } + with patch('skyflow.service_account.is_expired', return_value=True): + with self.assertRaises(SkyflowError) as context: + validate_credentials(self.logger, credentials) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value) + + def test_validate_credentials_empty_credentials(self): + credentials = {} + with self.assertRaises(SkyflowError) as context: + validate_credentials(self.logger, credentials) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS.value) + + def test_validate_credentials_multiple_auth_methods(self): + credentials = { + "token": "valid_token", + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + with self.assertRaises(SkyflowError) as context: + validate_credentials(self.logger, credentials) + self.assertEqual(context.exception.message, 
SkyflowMessages.Error.MULTIPLE_CREDENTIALS_PASSED.value) + + + def test_validate_credentials_with_empty_context(self): + credentials = { + "token": "valid_token", + "context": "" + } + with patch('skyflow.service_account.is_expired', return_value=False): + with self.assertRaises(SkyflowError) as context: + validate_credentials(self.logger, credentials) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CONTEXT.value) + + def test_validate_log_level_valid(self): + from skyflow.utils.enums import LogLevel + log_level = LogLevel.ERROR + validate_log_level(self.logger, log_level) + + def test_validate_log_level_invalid(self): + class InvalidEnum: + pass + invalid_log_level = InvalidEnum() + with self.assertRaises(SkyflowError) as context: + validate_log_level(self.logger, invalid_log_level) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_LOG_LEVEL.value) + + def test_validate_log_level_none(self): + with self.assertRaises(SkyflowError) as context: + validate_log_level(self.logger, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_LOG_LEVEL.value) + + def test_validate_keys_valid(self): + config = {"vault_id": "test_id", "cluster_id": "test_cluster"} + validate_keys(self.logger, config, ["vault_id", "cluster_id"]) + + def test_validate_keys_invalid(self): + config = {"invalid_key": "value"} + with self.assertRaises(SkyflowError) as context: + validate_keys(self.logger, config, ["vault_id", "cluster_id"]) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_KEY.value.format("invalid_key")) + + def test_validate_vault_config_valid(self): + from skyflow.utils.enums import Env + config = { + "vault_id": "vault123", + "cluster_id": "cluster123", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + }, + "env": Env.DEV + } + self.assertTrue(validate_vault_config(self.logger, config)) + + def test_validate_vault_config_missing_required(self): + config = { + "cluster_id": "cluster123" + } + with self.assertRaises(SkyflowError) as context: + validate_vault_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_VAULT_ID.value) + + + def test_validate_update_vault_config_valid(self): + from skyflow.utils.enums import Env + config = { + "vault_id": "vault123", + "cluster_id": "cluster123", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + }, + "env": Env.DEV + } + self.assertTrue(validate_update_vault_config(self.logger, config)) + + def test_validate_update_vault_config_missing_credentials(self): + config = { + "vault_id": "vault123", + "cluster_id": "cluster123" + } + with self.assertRaises(SkyflowError) as context: + validate_update_vault_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("vault", "vault123")) + + def test_validate_update_vault_config_invalid_cluster_id(self): + config = { + "vault_id": "vault123", + "cluster_id": "", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + } + with self.assertRaises(SkyflowError) as context: + validate_update_vault_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format("vault123")) + + def test_validate_connection_config_valid(self): + config = { + "connection_id": "conn123", + "connection_url": "https://example.com", + "credentials": { + "api_key": 
"sky-abc12-1234567890abcdef1234567890abcdef" + } + } + self.assertTrue(validate_connection_config(self.logger, config)) + + def test_validate_connection_config_missing_url(self): + config = { + "connection_id": "conn123", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + } + with self.assertRaises(SkyflowError) as context: + validate_connection_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CONNECTION_URL.value.format("conn123")) + + def test_validate_connection_config_empty_connection_id(self): + config = { + "connection_id": "", + "connection_url": "https://example.com", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + } + with self.assertRaises(SkyflowError) as context: + validate_connection_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CONNECTION_ID.value) + + def test_validate_update_connection_config_valid(self): + config = { + "connection_id": "conn123", + "connection_url": "https://example.com", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + } + self.assertTrue(validate_update_connection_config(self.logger, config)) + + def test_validate_update_connection_config_missing_credentials(self): + config = { + "connection_id": "conn123", + "connection_url": "https://example.com" + } + with self.assertRaises(SkyflowError) as context: + validate_update_connection_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("connection", "conn123")) + + def test_validate_update_connection_config_empty_url(self): + config = { + "connection_id": "conn123", + "connection_url": "", + "credentials": { + "api_key": "sky-abc12-1234567890abcdef1234567890abcdef" + } + } + with self.assertRaises(SkyflowError) as context: + validate_update_connection_config(self.logger, config) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CONNECTION_URL.value.format("conn123")) + + def test_validate_file_from_request_valid_file(self): + file_obj = MagicMock() + file_obj.name = "test.txt" + file_input = MagicMock() + file_input.file = file_obj + file_input.file_path = None + validate_file_from_request(file_input) + + def test_validate_file_from_request_valid_file_path(self): + file_input = MagicMock() + file_input.file = None + file_input.file_path = self.temp_file_path + validate_file_from_request(file_input) + + def test_validate_file_from_request_missing_both(self): + file_input = MagicMock() + file_input.file = None + file_input.file_path = None + with self.assertRaises(SkyflowError) as context: + validate_file_from_request(file_input) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_INPUT.value) + + def test_validate_file_from_request_both_provided(self): + file_obj = MagicMock() + file_obj.name = "test.txt" + file_input = MagicMock() + file_input.file = file_obj + file_input.file_path = "/path/to/file" + with self.assertRaises(SkyflowError) as context: + validate_file_from_request(file_input) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_INPUT.value) + + + def test_validate_file_from_request_invalid_file_path(self): + file_input = MagicMock() + file_input.file = None + file_input.file_path = "/nonexistent/path/to/file" + with self.assertRaises(SkyflowError) as context: + validate_file_from_request(file_input) + 
self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_DEIDENTIFY_FILE_PATH.value) + + def test_validate_insert_request_valid(self): + request = MagicMock() + request.table = "test_table" + request.values = [{"field1": "value1"}] + request.upsert = None + request.homogeneous = None + request.token_mode = None + request.return_tokens = False + request.continue_on_error = False + request.tokens = None + validate_insert_request(self.logger, request) + + def test_validate_insert_request_invalid_table(self): + request = MagicMock() + request.table = 123 + request.values = [{"field1": "value1"}] + with self.assertRaises(SkyflowError) as context: + validate_insert_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TABLE_NAME_IN_INSERT.value) + + def test_validate_insert_request_empty_values(self): + request = MagicMock() + request.table = "test_table" + request.values = [] + with self.assertRaises(SkyflowError) as context: + validate_insert_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_DATA_IN_INSERT.value) + + + def test_validate_delete_request_valid(self): + request = MagicMock() + request.table = "test_table" + request.ids = ["id1", "id2"] + validate_delete_request(self.logger, request) + + def test_validate_delete_request_empty_table(self): + request = MagicMock() + request.table = "" + request.ids = ["id1"] + with self.assertRaises(SkyflowError) as context: + validate_delete_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_TABLE_VALUE.value) + + def test_validate_delete_request_missing_ids(self): + request = MagicMock() + request.table = "test_table" + request.ids = None + with self.assertRaises(SkyflowError) as context: + validate_delete_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_RECORD_IDS_IN_DELETE.value) + + def test_validate_query_request_valid(self): + request = MagicMock() + request.query = "SELECT * FROM test_table" + validate_query_request(self.logger, request) + + def test_validate_query_request_empty_query(self): + request = MagicMock() + request.query = "" + with self.assertRaises(SkyflowError) as context: + validate_query_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_QUERY.value) + + def test_validate_query_request_invalid_query_type(self): + request = MagicMock() + request.query = 123 + with self.assertRaises(SkyflowError) as context: + validate_query_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_QUERY_TYPE.value.format(str(type(123)))) + + def test_validate_query_request_non_select_query(self): + request = MagicMock() + request.query = "INSERT INTO test_table VALUES (1)" + with self.assertRaises(SkyflowError) as context: + validate_query_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_QUERY_COMMAND.value.format(request.query)) + + def test_validate_get_detect_run_request_valid(self): + request = MagicMock() + request.run_id = "test_run_123" + validate_get_detect_run_request(self.logger, request) + + def test_validate_get_detect_run_request_empty_run_id(self): + request = MagicMock() + request.run_id = "" + with self.assertRaises(SkyflowError) as context: + validate_get_detect_run_request(self.logger, request) + self.assertEqual(context.exception.message, 
SkyflowMessages.Error.INVALID_RUN_ID.value) + + def test_validate_get_detect_run_request_invalid_run_id_type(self): + request = MagicMock() + request.run_id = 123 # Invalid type + with self.assertRaises(SkyflowError) as context: + validate_get_detect_run_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_RUN_ID.value) + + def test_validate_get_request_valid(self): + from skyflow.utils.enums import RedactionType + request = MagicMock() + request.table = "test_table" + request.redaction_type = RedactionType.PLAIN_TEXT + request.column_name = None + request.column_values = None + request.ids = ["id1", "id2"] + request.fields = ["field1", "field2"] + request.offset = None + request.limit = None + request.download_url = False + request.return_tokens = False + validate_get_request(self.logger, request) + + + def test_validate_get_request_invalid_table_type(self): + request = MagicMock() + request.table = 123 + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TABLE_VALUE.value) + + def test_validate_get_request_empty_table(self): + request = MagicMock() + request.table = "" + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_TABLE_VALUE.value) + + def test_validate_get_request_invalid_redaction_type(self): + request = GetRequest( + table="test_table", + fields="invalid", + ids=["id1", "id2"], + redaction_type="invalid" + ) + + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(request.redaction_type))) + + def test_validate_get_request_invalid_fields_type(self): + request= GetRequest( + table="test_table", + fields="invalid" + ) + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_FIELDS_VALUE.value.format(type(request.fields))) + + def test_validate_get_request_empty_fields(self): + request = GetRequest( + table="test_table", + ids=[], + fields=[] + ) + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_FIELDS_VALUE.value.format(type(request.fields))) + + def test_validate_get_request_invalid_column_values_type(self): + request = GetRequest( + table="test_table", + column_name="test_column", + column_values="invalid", + ) + + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_COLUMN_VALUE.value.format(type(request.column_values))) + + def test_validate_get_request_tokens_with_redaction(self): + request = GetRequest( + table="test_table", + return_tokens=True, + redaction_type = RedactionType.PLAIN_TEXT + ) + + with self.assertRaises(SkyflowError) as context: + validate_get_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.REDACTION_WITH_TOKENS_NOT_SUPPORTED.value) + + def test_validate_query_request_valid_complex(self): + request = MagicMock() + request.query = "SELECT * FROM table1 JOIN table2 ON table1.id = table2.id WHERE field = 'value'" + 
validate_query_request(self.logger, request) + + + def test_validate_query_request_invalid_update(self): + request = MagicMock() + request.query = "UPDATE table SET field = 'value'" # Only SELECT allowed + with self.assertRaises(SkyflowError) as context: + validate_query_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_QUERY_COMMAND.value.format(request.query)) + + def test_validate_update_request_valid(self): + request = MagicMock() + request.table = "test_table" + request.data = {"skyflow_id": "id123", "field1": "value1"} + request.return_tokens = False + request.token_mode = None + request.tokens = None + validate_update_request(self.logger, request) + + def test_validate_update_request_invalid_table_type(self): + request = UpdateRequest( + table=123, + data = {"skyflow_id": "id123"} + ) + with self.assertRaises(SkyflowError) as context: + validate_update_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TABLE_VALUE.value) + + def test_validate_update_request_invalid_token_mode(self): + request = UpdateRequest( + table="test_table", + data = {"skyflow_id": "id123", "field1": "value1"}, + token_mode = "invalid" + ) + with self.assertRaises(SkyflowError) as context: + validate_update_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TOKEN_MODE_TYPE.value) + + def test_validate_detokenize_request_valid(self): + request = MagicMock() + request.data = [{"token": "token123"}] + request.continue_on_error = False + validate_detokenize_request(self.logger, request) + + def test_validate_detokenize_request_empty_data(self): + request = MagicMock() + request.data = [] # Empty list + request.continue_on_error = False + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value) + + def test_validate_detokenize_request_invalid_token(self): + request = MagicMock() + request.data = [{"token": 123}] # Invalid token type + request.continue_on_error = False + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TOKEN_TYPE.value.format("DETOKENIZE")) + + def test_validate_tokenize_request_valid(self): + request = MagicMock() + request.values = [{"value": "test", "column_group": "group1"}] + validate_tokenize_request(self.logger, request) + + + def test_validate_tokenize_request_invalid_values_type(self): + request = MagicMock() + request.values = "invalid" # Should be list + with self.assertRaises(SkyflowError) as context: + validate_tokenize_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETERS.value.format(type(request.values))) + + def test_validate_tokenize_request_empty_values(self): + request = MagicMock() + request.values = [] # Empty list + with self.assertRaises(SkyflowError) as context: + validate_tokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETERS.value) + + def test_validate_tokenize_request_missing_required_fields(self): + request = MagicMock() + request.values = [{"value": "test"}] # Missing column_group + with self.assertRaises(SkyflowError) as context: + validate_tokenize_request(self.logger, request) + 
self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETER_KEY.value.format(0)) + + def test_validate_invoke_connection_params_valid(self): + query_params = {"param1": "value1"} + path_params = {"path1": "value1"} + validate_invoke_connection_params(self.logger, query_params, path_params) + + def test_validate_invoke_connection_params_invalid_path_params_type(self): + request = InvokeConnectionRequest( + method="GET", + query_params={"param1": "value1"}, + path_params="invalid" + ) + with self.assertRaises(SkyflowError) as context: + validate_invoke_connection_params(self.logger, request.query_params, request.path_params) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_PATH_PARAMS.value) + + def test_validate_invoke_connection_params_invalid_query_params_type(self): + request = InvokeConnectionRequest( + method="GET", + query_params="invalid", + path_params={"path1": "value1"} + ) + with self.assertRaises(SkyflowError) as context: + validate_invoke_connection_params(self.logger, request.query_params, request.path_params) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_QUERY_PARAMS.value) + + def test_validate_invoke_connection_params_non_string_path_param(self): + request = InvokeConnectionRequest( + method="GET", + query_params={"param1": "value1"}, + path_params={1: "value1"} + ) + with self.assertRaises(SkyflowError) as context: + validate_invoke_connection_params(self.logger, request.query_params, request.path_params) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_PATH_PARAMS.value) + + def test_validate_invoke_connection_params_non_string_query_param_key(self): + request = InvokeConnectionRequest( + method="GET", + query_params={1: "value1"}, + path_params={"path1": "value1"} + ) + with self.assertRaises(SkyflowError) as context: + validate_invoke_connection_params(self.logger, request.query_params, request.path_params) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_QUERY_PARAMS.value) + + def test_validate_invoke_connection_params_non_serializable_query_params(self): + class NonSerializable: + pass + request = InvokeConnectionRequest( + method="GET", + query_params={"param1": NonSerializable()}, + path_params={"path1": "value1"} + ) + with self.assertRaises(SkyflowError) as context: + validate_invoke_connection_params(self.logger, request.query_params, request.path_params) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_QUERY_PARAMS.value) + + def test_validate_deidentify_text_request_valid(self): + request = DeidentifyTextRequest( + text="test", + entities=None, + allow_regex_list=None, + restrict_regex_list = None, + token_format = None, + transformations = None, + ) + validate_deidentify_text_request(self.logger, request) + + def test_validate_reidentify_text_request_valid(self): + request = ReidentifyTextRequest( + text="test", + masked_entities=[DetectEntities.CREDIT_CARD], + redacted_entities=[DetectEntities.SSN], + plain_text_entities=None, + ) + validate_reidentify_text_request(self.logger, request) + + def test_validate_reidentify_text_request_empty_text(self): + request = ReidentifyTextRequest( + text="", + masked_entities=[DetectEntities.CREDIT_CARD], + redacted_entities=[DetectEntities.SSN], + ) + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + 
SkyflowMessages.Error.INVALID_TEXT_IN_REIDENTIFY.value) + + def test_validate_reidentify_text_request_invalid_redacted_entities(self): + request = ReidentifyTextRequest( + text="test", + redacted_entities="invalid", + ) + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_REDACTED_ENTITIES_IN_REIDENTIFY.value) + + def test_validate_reidentify_text_request_invalid_plain_text_entities(self): + request = ReidentifyTextRequest( + text="test", + plain_text_entities="invalid", + ) + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_PLAIN_TEXT_ENTITIES_IN_REIDENTIFY.value) + + + def test_validate_deidentify_text_request_empty_text(self): + request = DeidentifyTextRequest( + text="", + entities=None, + allow_regex_list=None, + restrict_regex_list=None, + token_format=None, + transformations=None, + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TEXT_IN_DEIDENTIFY.value) + + def test_validate_deidentify_text_request_invalid_text_type(self): + request = DeidentifyTextRequest( + text=["test"], + entities=None, + allow_regex_list=None, + restrict_regex_list=None, + token_format=None, + transformations=None, + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TEXT_IN_DEIDENTIFY.value) + + def test_validate_deidentify_text_request_invalid_entities_type(self): + request = DeidentifyTextRequest( + text="test", + entities="invalid", + allow_regex_list=None, + restrict_regex_list=None, + token_format=None, + transformations=None, + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_ENTITIES_IN_DEIDENTIFY.value) + + def test_validate_deidentify_text_request_invalid_allow_regex(self): + request = DeidentifyTextRequest( + text="test", + allow_regex_list="invalid", + restrict_regex_list=None, + token_format=None, + transformations=None, + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_ALLOW_REGEX_LIST.value) + + def test_validate_deidentify_text_request_invalid_restrict_regex(self): + request = DeidentifyTextRequest( + text="test", + restrict_regex_list="invalid", + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_RESTRICT_REGEX_LIST.value) + + def test_validate_deidentify_text_request_invalid_token_format(self): + request = DeidentifyTextRequest( + text="test", + token_format="invalid", + transformations=None, + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TOKEN_FORMAT.value) + + + def test_validate_reidentify_text_request_valid(self): + request = MagicMock() + request.text = "test text" + request.redacted_entities = None + 
request.masked_entities = None + request.plain_text_entities = None + validate_reidentify_text_request(self.logger, request) + + def test_validate_reidentify_text_request_empty_text(self): + request = MagicMock() + request.text = "" # Empty text + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TEXT_IN_REIDENTIFY.value) + + def test_validate_reidentify_text_request_invalid_text_type(self): + request = MagicMock() + request.text = 123 # Invalid type + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_TEXT_IN_REIDENTIFY.value) + + def test_validate_reidentify_text_request_invalid_redacted_entities(self): + request = MagicMock() + request.text = "test text" + request.redacted_entities = "invalid" + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_REDACTED_ENTITIES_IN_REIDENTIFY.value) + + def test_validate_reidentify_text_request_invalid_plain_text_entities(self): + request = ReidentifyTextRequest( + text="test text", + plain_text_entities="invalid" + ) + with self.assertRaises(SkyflowError) as context: + validate_reidentify_text_request(self.logger, request) + self.assertEqual(context.exception.message, + SkyflowMessages.Error.INVALID_PLAIN_TEXT_ENTITIES_IN_REIDENTIFY.value) + + def test_validate_deidentify_file_request_valid(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + entities=None, + allow_regex_list=None, + restrict_regex_list=None, + token_format=None, + transformations=None, + output_processed_image=None, + output_ocr_text=None, + masking_method=None, + pixel_density=None, + max_resolution=None, + output_processed_audio=None, + output_transcription=None, + bleep=None, + output_directory=None, + wait_time=None + ) + validate_deidentify_file_request(self.logger, request) + + def test_validate_deidentify_file_request_missing_file(self): + request = DeidentifyFileRequest(file=None) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_FILE_INPUT.value) + + def test_validate_deidentify_file_request_invalid_entities(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + entities="invalid" + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_DETECT_ENTITIES_TYPE.value) + + def test_validate_deidentify_file_request_invalid_allow_regex(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + allow_regex_list="invalid", + entities=[DetectEntities.ACCOUNT_NUMBER] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_ALLOW_REGEX_LIST.value) + + def test_validate_deidentify_file_request_invalid_restrict_regex(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + 
file=file_input, + restrict_regex_list="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_RESTRICT_REGEX_LIST.value) + + def test_validate_deidentify_file_request_invalid_token_format(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + token_format="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TOKEN_FORMAT.value) + + def test_validate_deidentify_file_request_invalid_transformations(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + transformations="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TRANSFORMATIONS.value) + + def test_validate_deidentify_file_request_invalid_output_processed_image(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + output_processed_image="true", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_OUTPUT_PROCESSED_IMAGE.value) + + def test_validate_deidentify_file_request_invalid_output_ocr_text(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + output_ocr_text="true", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_OUTPUT_OCR_TEXT.value) + + def test_validate_deidentify_file_request_invalid_masking_method(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + masking_method="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_MASKING_METHOD.value) + + def test_validate_deidentify_file_request_invalid_pixel_density(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + pixel_density="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_PIXEL_DENSITY.value) + + def test_validate_deidentify_file_request_invalid_max_resolution(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + max_resolution="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_MAXIMUM_RESOLUTION.value) + + def 
test_validate_deidentify_file_request_invalid_output_processed_audio(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + output_processed_audio="true", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_OUTPUT_PROCESSED_AUDIO.value) + + def test_validate_deidentify_file_request_invalid_output_transcription(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + output_transcription="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_OUTPUT_TRANSCRIPTION.value) + + def test_validate_deidentify_file_request_invalid_wait_time(self): + file_input = FileInput(file_path=self.temp_file_path) + request = DeidentifyFileRequest( + file=file_input, + wait_time="invalid", + entities=[DetectEntities.SSN] + ) + with self.assertRaises(SkyflowError) as context: + validate_deidentify_file_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_WAIT_TIME.value) + + def test_validate_detokenize_request_valid(self): + request = DetokenizeRequest( + data=[{"token": "token123", "redaction": RedactionType.PLAIN_TEXT}], + continue_on_error=False + ) + validate_detokenize_request(self.logger, request) + + def test_validate_detokenize_request_empty_data(self): + request = DetokenizeRequest(data=[], continue_on_error=False) + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value) + + def test_validate_detokenize_request_invalid_token_type(self): + request = DetokenizeRequest(data=[{"token": 123}], continue_on_error=False) + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TOKEN_TYPE.value.format("DETOKENIZE")) + + def test_validate_detokenize_request_missing_token_key(self): + request = DetokenizeRequest(data=[{"not_token": "value"}], continue_on_error=False) + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value.format(str(type(request.data)))) + + def test_validate_detokenize_request_invalid_continue_on_error_type(self): + request = DetokenizeRequest(data=[{"token": "token123"}], continue_on_error="invalid") + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value) + + def test_validate_detokenize_request_invalid_redaction_type(self): + request = DetokenizeRequest(data=[{"token": "token123", "redaction": "invalid"}], continue_on_error=False) + with self.assertRaises(SkyflowError) as context: + validate_detokenize_request(self.logger, request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(str(type("invalid")))) diff --git a/tests/vault/controller/test__detect.py 
b/tests/vault/controller/test__detect.py index 3096ce08..dc3a753f 100644 --- a/tests/vault/controller/test__detect.py +++ b/tests/vault/controller/test__detect.py @@ -159,7 +159,7 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 word_count=1, char_count=1, size_in_kb=1, duration_in_seconds=None, page_count=None, slide_count=None, entities=[], run_id="runid123", - status="SUCCESS", errors=None)) as mock_parse: + status="SUCCESS")) as mock_parse: result = self.detect.deidentify_file(req) mock_validate.assert_called_once() @@ -184,7 +184,6 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 self.assertIsNone(result.page_count) self.assertIsNone(result.slide_count) self.assertEqual(result.entities, []) - self.assertEqual(result.errors, None) @patch("skyflow.vault.controller._detect.validate_deidentify_file_request") @patch("skyflow.vault.controller._detect.base64") @@ -222,7 +221,7 @@ def test_deidentify_file_audio_success(self, mock_base64, mock_validate): word_count=1, char_count=1, size_in_kb=1, duration_in_seconds=1, page_count=None, slide_count=None, entities=[], run_id="runid456", - status="SUCCESS", errors=None)) as mock_parse: + status="SUCCESS")) as mock_parse: result = self.detect.deidentify_file(req) mock_validate.assert_called_once() files_api.deidentify_audio.assert_called_once() @@ -263,8 +262,7 @@ def test_get_detect_run_success(self, mock_validate): return_value=DeidentifyFileResponse(file="file", type="txt", extension="txt", word_count=1, char_count=1, size_in_kb=1, duration_in_seconds=None, page_count=None, slide_count=None, entities=[], - run_id="runid789", status="SUCCESS", - errors=None)) as mock_parse: + run_id="runid789", status="SUCCESS")) as mock_parse: result = self.detect.get_detect_run(req) mock_validate.assert_called_once() files_api.get_run.assert_called_once() @@ -658,7 +656,11 @@ def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_ba # Setup processed response processed_response = Mock() processed_response.status = "SUCCESS" - processed_response.output = [] + processed_response.output = [ + Mock(processedFile="dGVzdCBjb250ZW", + processedFileType="txt", + processedFileExtension="txt") + ] processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) # Test the method @@ -679,16 +681,14 @@ def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_ba entities=[], run_id="runid123", status="SUCCESS", - errors=None )) as mock_parse: result = self.detect.deidentify_file(req) mock_file.read.assert_called_once() - mock_basename.assert_called_with("/path/to/test.txt") - mock_validate.assert_called_once() files_api.deidentify_text.assert_called_once() + mock_basename.assert_called_with("/path/to/test.txt") mock_poll.assert_called_once() mock_parse.assert_called_once() @@ -710,4 +710,3 @@ def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_ba self.assertIsNone(result.page_count) self.assertIsNone(result.slide_count) self.assertEqual(result.entities, []) - self.assertEqual(result.errors, None) diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py index 8d1d1ab0..4e1a0dda 100644 --- a/tests/vault/controller/test__vault.py +++ b/tests/vault/controller/test__vault.py @@ -30,7 +30,7 @@ def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate) # Mock request request = InsertRequest( - table_name=TABLE_NAME, + table=TABLE_NAME, values=[{"field": "value"}], tokens=None, 
return_tokens=True, @@ -89,7 +89,7 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val # Mock request with continue_on_error set to False request = InsertRequest( - table_name=TABLE_NAME, + table=TABLE_NAME, values=[{"field": "value"}], tokens=None, return_tokens=True, @@ -129,7 +129,7 @@ def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_val @patch("skyflow.vault.controller._vault.validate_insert_request") def test_insert_handles_generic_error(self, mock_validate): - request = InsertRequest(table_name="test_table", values=[{"column_name": "value"}], return_tokens=False, + request = InsertRequest(table="test_table", values=[{"column_name": "value"}], return_tokens=False, upsert=False, homogeneous=False, continue_on_error=False, token_mode=Mock()) records_api = self.vault_client.get_records_api.return_value @@ -147,7 +147,7 @@ def test_insert_with_continue_on_error_false_when_tokens_are_not_none(self, mock # Mock request with continue_on_error set to False request = InsertRequest( - table_name=TABLE_NAME, + table=TABLE_NAME, values=[{"field": "value"}], tokens=[{"token_field": "token_val1"}], return_tokens=True, From aaa18c7e3f3494f80e9c3b01292de6fd13ca5736 Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Wed, 17 Sep 2025 10:56:44 +0000 Subject: [PATCH 43/60] [AUTOMATED] Private Release 1.15.4.dev0+8e1d653 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index b349bede..6d914ff6 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.1.0b1.dev0+61e368f' +current_version = '1.15.4.dev0+8e1d653' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index fab22bb1..4026c513 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.1.0b1.dev0+61e368f' \ No newline at end of file +SDK_VERSION = '1.15.4.dev0+8e1d653' \ No newline at end of file From dab7b669fbabd477657c9c7c4ecf2ba8ade9728b Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Fri, 19 Sep 2025 19:47:38 +0530 Subject: [PATCH 44/60] SK-2270: revert the fern generated detect changes (#201) --- skyflow/generated/rest/__init__.py | 4 ++-- skyflow/generated/rest/core/client_wrapper.py | 2 +- skyflow/generated/rest/types/__init__.py | 4 ++-- skyflow/generated/rest/types/deidentify_status_response.py | 2 +- skyflow/generated/rest/types/error_string.py | 3 +++ skyflow/generated/rest/types/reidentify_file_response.py | 3 +-- 6 files changed, 10 insertions(+), 8 deletions(-) create mode 100644 skyflow/generated/rest/types/error_string.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index b8309d05..7eda9318 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -28,12 +28,12 @@ EntityTypes, ErrorResponse, ErrorResponseError, + ErrorString, GooglerpcStatus, ProtobufAny, RedactionEnumRedaction, ReidentifyFileResponse, ReidentifyFileResponseOutput, - ReidentifyFileResponseOutputType, ReidentifyFileResponseStatus, ReidentifyStringResponse, RequestActionType, @@ -176,6 +176,7 @@ "EntityTypes", "ErrorResponse", "ErrorResponseError", + "ErrorString", "GooglerpcStatus", "InternalServerError", "NotFoundError", @@ -189,7 +190,6 @@ "ReidentifyFileRequestFormat", "ReidentifyFileResponse", "ReidentifyFileResponseOutput", - "ReidentifyFileResponseOutputType", 
"ReidentifyFileResponseStatus", "ReidentifyStringRequestFormat", "ReidentifyStringResponse", diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index 5179f373..a3210a7e 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -24,7 +24,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "skyflow_vault", - "X-Fern-SDK-Version": "0.0.323", + "X-Fern-SDK-Version": "0.0.252", **(self.get_custom_headers() or {}), } headers["Authorization"] = f"Bearer {self._get_token()}" diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index 92d826c9..aa9b4a35 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -27,12 +27,12 @@ from .entity_types import EntityTypes from .error_response import ErrorResponse from .error_response_error import ErrorResponseError +from .error_string import ErrorString from .googlerpc_status import GooglerpcStatus from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction from .reidentify_file_response import ReidentifyFileResponse from .reidentify_file_response_output import ReidentifyFileResponseOutput -from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus from .reidentify_string_response import ReidentifyStringResponse from .request_action_type import RequestActionType @@ -106,12 +106,12 @@ "EntityTypes", "ErrorResponse", "ErrorResponseError", + "ErrorString", "GooglerpcStatus", "ProtobufAny", "RedactionEnumRedaction", "ReidentifyFileResponse", "ReidentifyFileResponseOutput", - "ReidentifyFileResponseOutputType", "ReidentifyFileResponseStatus", "ReidentifyStringResponse", "RequestActionType", diff --git a/skyflow/generated/rest/types/deidentify_status_response.py b/skyflow/generated/rest/types/deidentify_status_response.py index 712a85b2..68a6cd3f 100644 --- a/skyflow/generated/rest/types/deidentify_status_response.py +++ b/skyflow/generated/rest/types/deidentify_status_response.py @@ -24,7 +24,7 @@ class DeidentifyStatusResponse(UniversalBaseModel): How the input file was specified. """ - output_type: DeidentifyStatusResponseOutputType = pydantic.Field() + output_type: typing.Optional[DeidentifyStatusResponseOutputType] = pydantic.Field(default=None) """ How the output file is specified. """ diff --git a/skyflow/generated/rest/types/error_string.py b/skyflow/generated/rest/types/error_string.py new file mode 100644 index 00000000..4ebbdff4 --- /dev/null +++ b/skyflow/generated/rest/types/error_string.py @@ -0,0 +1,3 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +ErrorString = str \ No newline at end of file diff --git a/skyflow/generated/rest/types/reidentify_file_response.py b/skyflow/generated/rest/types/reidentify_file_response.py index bd90fb49..c67b41ac 100644 --- a/skyflow/generated/rest/types/reidentify_file_response.py +++ b/skyflow/generated/rest/types/reidentify_file_response.py @@ -5,7 +5,6 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .reidentify_file_response_output import ReidentifyFileResponseOutput -from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus @@ -19,7 +18,7 @@ class ReidentifyFileResponse(UniversalBaseModel): Status of the re-identify operation. """ - output_type: ReidentifyFileResponseOutputType = pydantic.Field() + output_type: typing.Literal["BASE64"] = pydantic.Field(default="BASE64") """ Format of the output file. """ From bdcc5ac585904c164941dd924f2e04f6a82ab0e7 Mon Sep 17 00:00:00 2001 From: raushan-skyflow Date: Fri, 19 Sep 2025 14:17:54 +0000 Subject: [PATCH 45/60] [AUTOMATED] Private Release 1.15.5.dev0+dab7b66 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6d914ff6..a95dc1dd 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.4.dev0+8e1d653' +current_version = '1.15.5.dev0+dab7b66' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 4026c513..f15769f8 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.4.dev0+8e1d653' \ No newline at end of file +SDK_VERSION = '1.15.5.dev0+dab7b66' \ No newline at end of file From 08b535aa0e96f882937e3efa41a8bfe874dc06e1 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow <156889717+saileshwar-skyflow@users.noreply.github.com> Date: Thu, 6 Nov 2025 19:57:07 +0530 Subject: [PATCH 46/60] SK-2353: Update generated code with updated openapi spec file. 
(#211) * SK-2353: update generated code with updated openapi spec file --- .github/workflows/shared-build-and-deploy.yml | 5 + .github/workflows/shared-tests.yml | 6 + skyflow/generated/rest/__init__.py | 204 ++- skyflow/generated/rest/audit/client.py | 64 + skyflow/generated/rest/client.py | 16 +- skyflow/generated/rest/core/client_wrapper.py | 19 +- skyflow/generated/rest/files/__init__.py | 64 +- skyflow/generated/rest/files/client.py | 1492 +++++++++-------- skyflow/generated/rest/files/raw_client.py | 1395 +++++++-------- .../generated/rest/files/types/__init__.py | 84 +- ...identify_audio_request_file_data_format.py | 5 - ...tify_audio_request_output_transcription.py | 14 - ...ntify_document_request_file_data_format.py | 5 - ...uest_deidentify_audio_entity_types_item.py | 79 + ...t_deidentify_audio_output_transcription.py | 10 + ...equest_deidentify_pdf_entity_types_item.py | 79 + ...uest_deidentify_image_entity_types_item.py | 79 + ...request_deidentify_image_masking_method.py | 5 + ...t_deidentify_document_entity_types_item.py | 79 + ...identify_presentation_entity_types_item.py | 79 + ...eidentify_spreadsheet_entity_types_item.py | 79 + ...ntify_structured_text_entity_types_item.py | 79 + ...quest_deidentify_text_entity_types_item.py | 79 + ...identify_file_request_entity_types_item.py | 79 + ...identify_image_request_file_data_format.py | 7 - ...deidentify_image_request_masking_method.py | 5 - .../deidentify_presentation_request_file.py | 34 - ...y_presentation_request_file_data_format.py | 5 - ...fy_spreadsheet_request_file_data_format.py | 5 - ...deidentify_structured_text_request_file.py | 34 - ...tructured_text_request_file_data_format.py | 5 - .../types/reidentify_file_request_file.py | 34 - ...eidentify_file_request_file_data_format.py | 7 - skyflow/generated/rest/guardrails/client.py | 49 +- .../generated/rest/guardrails/raw_client.py | 45 +- skyflow/generated/rest/records/client.py | 20 + skyflow/generated/rest/strings/__init__.py | 4 +- skyflow/generated/rest/strings/client.py | 150 +- skyflow/generated/rest/strings/raw_client.py | 152 +- .../generated/rest/strings/types/__init__.py | 4 +- ...entify_string_request_entity_types_item.py | 79 + .../types/reidentify_string_request_format.py | 37 - skyflow/generated/rest/types/__init__.py | 136 +- skyflow/generated/rest/types/allow_regex.py | 5 - .../check_guardrails_response_validation.py | 5 - .../generated/rest/types/configuration_id.py | 3 - .../rest/types/deidentified_file_output.py | 46 + ...ed_file_output_processed_file_extension.py | 29 + ...tified_file_output_processed_file_type.py} | 13 +- .../rest/types/deidentify_file_response.py | 6 +- .../rest/types/deidentify_status_response.py | 74 - .../deidentify_status_response_output_type.py | 5 - .../deidentify_status_response_status.py | 5 - .../rest/types/deidentify_string_response.py | 10 +- ...ponse.py => detect_guardrails_response.py} | 20 +- .../detect_guardrails_response_validation.py | 5 + .../rest/types/detect_runs_response.py | 72 + .../types/detect_runs_response_output_type.py | 5 + .../rest/types/detect_runs_response_status.py | 5 + .../generated/rest/types/entity_location.py | 41 - skyflow/generated/rest/types/entity_types.py | 7 - .../rest/types/error_response_error.py | 13 +- skyflow/generated/rest/types/error_string.py | 3 - ...y_file_response_output.py => file_data.py} | 16 +- .../file_data_data_format.py} | 29 +- .../file_data_deidentify_audio.py} | 16 +- .../file_data_deidentify_audio_data_format.py | 5 + .../file_data_deidentify_document.py} | 16 +- 
...le_data_deidentify_document_data_format.py | 5 + .../file_data_deidentify_image.py} | 16 +- .../file_data_deidentify_image_data_format.py | 5 + .../file_data_deidentify_pdf.py} | 12 +- .../file_data_deidentify_presentation.py | 34 + ...ata_deidentify_presentation_data_format.py | 5 + .../file_data_deidentify_spreadsheet.py} | 16 +- ...data_deidentify_spreadsheet_data_format.py | 5 + .../file_data_deidentify_structured_text.py} | 16 +- ..._deidentify_structured_text_data_format.py | 5 + .../file_data_deidentify_text.py} | 12 +- .../rest/types/file_data_reidentify_file.py | 34 + .../file_data_reidentify_file_data_format.py | 7 + .../format.py} | 14 +- .../{entity_type.py => format_masked_item.py} | 72 +- .../rest/types/format_plaintext_item.py | 79 + .../rest/types/format_redacted_item.py | 79 + .../rest/types/{vault_id.py => http_code.py} | 2 +- ...tring_response.py => identify_response.py} | 6 +- ..._output.py => reidentified_file_output.py} | 14 +- ...ed_file_output_processed_file_extension.py | 29 + .../rest/types/reidentify_file_response.py | 13 +- .../reidentify_file_response_output_type.py | 2 +- .../types/reidentify_file_response_status.py | 2 +- .../generated/rest/types/restrict_regex.py | 5 - ...rmations_shift_dates.py => shift_dates.py} | 14 +- .../types/shift_dates_entity_types_item.py | 5 + ..._entity.py => string_response_entities.py} | 8 +- skyflow/generated/rest/types/token_type.py | 39 - .../rest/types/token_type_default.py | 5 - .../rest/types/token_type_mapping.py | 47 + .../rest/types/token_type_mapping_default.py | 5 + .../token_type_mapping_entity_only_item.py | 79 + ...en_type_mapping_entity_unq_counter_item.py | 79 + .../token_type_mapping_vault_token_item.py | 79 + .../rest/types/token_type_without_vault.py | 34 - .../types/token_type_without_vault_default.py | 5 - .../generated/rest/types/transformations.py | 9 +- ...ormations_shift_dates_entity_types_item.py | 5 - .../rest/types/word_character_count.py | 37 + skyflow/generated/rest/version.py | 4 +- skyflow/utils/__init__.py | 2 +- skyflow/utils/_utils.py | 12 +- skyflow/utils/validations/_validations.py | 1 - skyflow/vault/controller/_detect.py | 59 +- tests/utils/test__utils.py | 36 +- tests/vault/controller/test__detect.py | 13 +- 115 files changed, 3941 insertions(+), 2490 deletions(-) delete mode 100644 skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_output_transcription.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_masking_method.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_deidentify_document_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_deidentify_presentation_entity_types_item.py create mode 100644 
skyflow/generated/rest/files/types/deidentify_file_request_deidentify_spreadsheet_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_deidentify_structured_text_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_deidentify_text_entity_types_item.py create mode 100644 skyflow/generated/rest/files/types/deidentify_file_request_entity_types_item.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_presentation_request_file.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py delete mode 100644 skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py delete mode 100644 skyflow/generated/rest/files/types/reidentify_file_request_file.py delete mode 100644 skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py create mode 100644 skyflow/generated/rest/strings/types/deidentify_string_request_entity_types_item.py delete mode 100644 skyflow/generated/rest/strings/types/reidentify_string_request_format.py delete mode 100644 skyflow/generated/rest/types/allow_regex.py delete mode 100644 skyflow/generated/rest/types/check_guardrails_response_validation.py delete mode 100644 skyflow/generated/rest/types/configuration_id.py create mode 100644 skyflow/generated/rest/types/deidentified_file_output.py create mode 100644 skyflow/generated/rest/types/deidentified_file_output_processed_file_extension.py rename skyflow/generated/rest/types/{deidentify_file_output_processed_file_type.py => deidentified_file_output_processed_file_type.py} (55%) delete mode 100644 skyflow/generated/rest/types/deidentify_status_response.py delete mode 100644 skyflow/generated/rest/types/deidentify_status_response_output_type.py delete mode 100644 skyflow/generated/rest/types/deidentify_status_response_status.py rename skyflow/generated/rest/types/{check_guardrails_response.py => detect_guardrails_response.py} (52%) create mode 100644 skyflow/generated/rest/types/detect_guardrails_response_validation.py create mode 100644 skyflow/generated/rest/types/detect_runs_response.py create mode 100644 skyflow/generated/rest/types/detect_runs_response_output_type.py create mode 100644 skyflow/generated/rest/types/detect_runs_response_status.py delete mode 100644 skyflow/generated/rest/types/entity_location.py delete mode 100644 skyflow/generated/rest/types/entity_types.py delete mode 100644 skyflow/generated/rest/types/error_string.py rename skyflow/generated/rest/types/{reidentify_file_response_output.py => file_data.py} (53%) rename skyflow/generated/rest/{files/types/deidentify_file_request_file_data_format.py => types/file_data_data_format.py} (89%) rename skyflow/generated/rest/{files/types/deidentify_file_request_file.py => types/file_data_deidentify_audio.py} (53%) create mode 100644 skyflow/generated/rest/types/file_data_deidentify_audio_data_format.py rename skyflow/generated/rest/{files/types/deidentify_audio_request_file.py => types/file_data_deidentify_document.py} (54%) create mode 100644 
skyflow/generated/rest/types/file_data_deidentify_document_data_format.py rename skyflow/generated/rest/{files/types/deidentify_image_request_file.py => types/file_data_deidentify_image.py} (53%) create mode 100644 skyflow/generated/rest/types/file_data_deidentify_image_data_format.py rename skyflow/generated/rest/{files/types/deidentify_pdf_request_file.py => types/file_data_deidentify_pdf.py} (66%) create mode 100644 skyflow/generated/rest/types/file_data_deidentify_presentation.py create mode 100644 skyflow/generated/rest/types/file_data_deidentify_presentation_data_format.py rename skyflow/generated/rest/{files/types/deidentify_document_request_file.py => types/file_data_deidentify_spreadsheet.py} (53%) create mode 100644 skyflow/generated/rest/types/file_data_deidentify_spreadsheet_data_format.py rename skyflow/generated/rest/{files/types/deidentify_spreadsheet_request_file.py => types/file_data_deidentify_structured_text.py} (53%) create mode 100644 skyflow/generated/rest/types/file_data_deidentify_structured_text_data_format.py rename skyflow/generated/rest/{files/types/deidentify_text_request_file.py => types/file_data_deidentify_text.py} (66%) create mode 100644 skyflow/generated/rest/types/file_data_reidentify_file.py create mode 100644 skyflow/generated/rest/types/file_data_reidentify_file_data_format.py rename skyflow/generated/rest/{files/types/reidentify_file_request_format.py => types/format.py} (58%) rename skyflow/generated/rest/types/{entity_type.py => format_masked_item.py} (98%) create mode 100644 skyflow/generated/rest/types/format_plaintext_item.py create mode 100644 skyflow/generated/rest/types/format_redacted_item.py rename skyflow/generated/rest/types/{vault_id.py => http_code.py} (81%) rename skyflow/generated/rest/types/{reidentify_string_response.py => identify_response.py} (78%) rename skyflow/generated/rest/types/{deidentify_file_output.py => reidentified_file_output.py} (56%) create mode 100644 skyflow/generated/rest/types/reidentified_file_output_processed_file_extension.py delete mode 100644 skyflow/generated/rest/types/restrict_regex.py rename skyflow/generated/rest/types/{transformations_shift_dates.py => shift_dates.py} (75%) create mode 100644 skyflow/generated/rest/types/shift_dates_entity_types_item.py rename skyflow/generated/rest/types/{detected_entity.py => string_response_entities.py} (82%) delete mode 100644 skyflow/generated/rest/types/token_type.py delete mode 100644 skyflow/generated/rest/types/token_type_default.py create mode 100644 skyflow/generated/rest/types/token_type_mapping.py create mode 100644 skyflow/generated/rest/types/token_type_mapping_default.py create mode 100644 skyflow/generated/rest/types/token_type_mapping_entity_only_item.py create mode 100644 skyflow/generated/rest/types/token_type_mapping_entity_unq_counter_item.py create mode 100644 skyflow/generated/rest/types/token_type_mapping_vault_token_item.py delete mode 100644 skyflow/generated/rest/types/token_type_without_vault.py delete mode 100644 skyflow/generated/rest/types/token_type_without_vault_default.py delete mode 100644 skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py create mode 100644 skyflow/generated/rest/types/word_character_count.py diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index 1b0309dd..e826c20b 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -27,6 +27,11 @@ jobs: python -m pip install --upgrade 
pip pip install setuptools wheel twine + - name: Build and install skyflow package + run: | + python setup.py sdist bdist_wheel + pip install dist/skyflow-*.whl + - name: Resolve Branch for the Tagged Commit id: resolve-branch if: ${{ inputs.tag == 'beta' || inputs.tag == 'public' }} diff --git a/.github/workflows/shared-tests.yml b/.github/workflows/shared-tests.yml index 0ab797d8..73253e3c 100644 --- a/.github/workflows/shared-tests.yml +++ b/.github/workflows/shared-tests.yml @@ -23,6 +23,12 @@ jobs: with: name: "credentials.json" json: ${{ secrets.VALID_SKYFLOW_CREDS_TEST }} + + - name: Build and install skyflow package + run: | + pip install --upgrade pip setuptools wheel + python setup.py sdist bdist_wheel + pip install dist/skyflow-*.whl - name: 'Run Tests' run: | diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index 7eda9318..8a59c25d 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -3,49 +3,69 @@ # isort: skip_file from .types import ( - AllowRegex, AuditEventAuditResourceType, AuditEventContext, AuditEventData, AuditEventHttpInfo, BatchRecordMethod, - CheckGuardrailsResponse, - CheckGuardrailsResponseValidation, - ConfigurationId, ContextAccessType, ContextAuthMode, - DeidentifyFileOutput, - DeidentifyFileOutputProcessedFileType, + DeidentifiedFileOutput, + DeidentifiedFileOutputProcessedFileExtension, + DeidentifiedFileOutputProcessedFileType, DeidentifyFileResponse, - DeidentifyStatusResponse, - DeidentifyStatusResponseOutputType, - DeidentifyStatusResponseStatus, DeidentifyStringResponse, - DetectedEntity, + DetectGuardrailsResponse, + DetectGuardrailsResponseValidation, + DetectRunsResponse, + DetectRunsResponseOutputType, + DetectRunsResponseStatus, DetokenizeRecordResponseValueType, - EntityLocation, - EntityType, - EntityTypes, ErrorResponse, ErrorResponseError, - ErrorString, + FileData, + FileDataDataFormat, + FileDataDeidentifyAudio, + FileDataDeidentifyAudioDataFormat, + FileDataDeidentifyDocument, + FileDataDeidentifyDocumentDataFormat, + FileDataDeidentifyImage, + FileDataDeidentifyImageDataFormat, + FileDataDeidentifyPdf, + FileDataDeidentifyPresentation, + FileDataDeidentifyPresentationDataFormat, + FileDataDeidentifySpreadsheet, + FileDataDeidentifySpreadsheetDataFormat, + FileDataDeidentifyStructuredText, + FileDataDeidentifyStructuredTextDataFormat, + FileDataDeidentifyText, + FileDataReidentifyFile, + FileDataReidentifyFileDataFormat, + Format, + FormatMaskedItem, + FormatPlaintextItem, + FormatRedactedItem, GooglerpcStatus, + HttpCode, + IdentifyResponse, ProtobufAny, RedactionEnumRedaction, + ReidentifiedFileOutput, + ReidentifiedFileOutputProcessedFileExtension, ReidentifyFileResponse, - ReidentifyFileResponseOutput, + ReidentifyFileResponseOutputType, ReidentifyFileResponseStatus, - ReidentifyStringResponse, RequestActionType, ResourceId, - RestrictRegex, - TokenType, - TokenTypeDefault, - TokenTypeWithoutVault, - TokenTypeWithoutVaultDefault, + ShiftDates, + ShiftDatesEntityTypesItem, + StringResponseEntities, + TokenTypeMapping, + TokenTypeMappingDefault, + TokenTypeMappingEntityOnlyItem, + TokenTypeMappingEntityUnqCounterItem, + TokenTypeMappingVaultTokenItem, Transformations, - TransformationsShiftDates, - TransformationsShiftDatesEntityTypesItem, UploadFileV2Response, Uuid, V1AuditAfterOptions, @@ -79,7 +99,7 @@ V1UpdateRecordResponse, V1VaultFieldMapping, V1VaultSchemaConfig, - VaultId, + WordCharacterCount, ) from .errors import BadRequestError, InternalServerError, 
NotFoundError, UnauthorizedError from . import audit, authentication, bin_lookup, files, guardrails, query, records, strings, tokens @@ -94,38 +114,27 @@ from .client import AsyncSkyflow, Skyflow from .environment import SkyflowEnvironment from .files import ( - DeidentifyAudioRequestFile, - DeidentifyAudioRequestFileDataFormat, - DeidentifyAudioRequestOutputTranscription, - DeidentifyDocumentRequestFile, - DeidentifyDocumentRequestFileDataFormat, - DeidentifyFileRequestFile, - DeidentifyFileRequestFileDataFormat, - DeidentifyImageRequestFile, - DeidentifyImageRequestFileDataFormat, - DeidentifyImageRequestMaskingMethod, - DeidentifyPdfRequestFile, - DeidentifyPresentationRequestFile, - DeidentifyPresentationRequestFileDataFormat, - DeidentifySpreadsheetRequestFile, - DeidentifySpreadsheetRequestFileDataFormat, - DeidentifyStructuredTextRequestFile, - DeidentifyStructuredTextRequestFileDataFormat, - DeidentifyTextRequestFile, - ReidentifyFileRequestFile, - ReidentifyFileRequestFileDataFormat, - ReidentifyFileRequestFormat, + DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem, + DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription, + DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem, + DeidentifyFileImageRequestDeidentifyImageEntityTypesItem, + DeidentifyFileImageRequestDeidentifyImageMaskingMethod, + DeidentifyFileRequestDeidentifyDocumentEntityTypesItem, + DeidentifyFileRequestDeidentifyPresentationEntityTypesItem, + DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem, + DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem, + DeidentifyFileRequestDeidentifyTextEntityTypesItem, + DeidentifyFileRequestEntityTypesItem, ) from .records import ( RecordServiceBulkGetRecordRequestOrderBy, RecordServiceBulkGetRecordRequestRedaction, RecordServiceGetRecordRequestRedaction, ) -from .strings import ReidentifyStringRequestFormat +from .strings import DeidentifyStringRequestEntityTypesItem from .version import __version__ __all__ = [ - "AllowRegex", "AsyncSkyflow", "AuditEventAuditResourceType", "AuditEventContext", @@ -139,45 +148,58 @@ "AuditServiceListAuditEventsRequestSortOpsOrderBy", "BadRequestError", "BatchRecordMethod", - "CheckGuardrailsResponse", - "CheckGuardrailsResponseValidation", - "ConfigurationId", "ContextAccessType", "ContextAuthMode", - "DeidentifyAudioRequestFile", - "DeidentifyAudioRequestFileDataFormat", - "DeidentifyAudioRequestOutputTranscription", - "DeidentifyDocumentRequestFile", - "DeidentifyDocumentRequestFileDataFormat", - "DeidentifyFileOutput", - "DeidentifyFileOutputProcessedFileType", - "DeidentifyFileRequestFile", - "DeidentifyFileRequestFileDataFormat", + "DeidentifiedFileOutput", + "DeidentifiedFileOutputProcessedFileExtension", + "DeidentifiedFileOutputProcessedFileType", + "DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem", + "DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription", + "DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageMaskingMethod", + "DeidentifyFileRequestDeidentifyDocumentEntityTypesItem", + "DeidentifyFileRequestDeidentifyPresentationEntityTypesItem", + "DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem", + "DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem", + "DeidentifyFileRequestDeidentifyTextEntityTypesItem", + "DeidentifyFileRequestEntityTypesItem", "DeidentifyFileResponse", - "DeidentifyImageRequestFile", - "DeidentifyImageRequestFileDataFormat", 
- "DeidentifyImageRequestMaskingMethod", - "DeidentifyPdfRequestFile", - "DeidentifyPresentationRequestFile", - "DeidentifyPresentationRequestFileDataFormat", - "DeidentifySpreadsheetRequestFile", - "DeidentifySpreadsheetRequestFileDataFormat", - "DeidentifyStatusResponse", - "DeidentifyStatusResponseOutputType", - "DeidentifyStatusResponseStatus", + "DeidentifyStringRequestEntityTypesItem", "DeidentifyStringResponse", - "DeidentifyStructuredTextRequestFile", - "DeidentifyStructuredTextRequestFileDataFormat", - "DeidentifyTextRequestFile", - "DetectedEntity", + "DetectGuardrailsResponse", + "DetectGuardrailsResponseValidation", + "DetectRunsResponse", + "DetectRunsResponseOutputType", + "DetectRunsResponseStatus", "DetokenizeRecordResponseValueType", - "EntityLocation", - "EntityType", - "EntityTypes", "ErrorResponse", "ErrorResponseError", - "ErrorString", + "FileData", + "FileDataDataFormat", + "FileDataDeidentifyAudio", + "FileDataDeidentifyAudioDataFormat", + "FileDataDeidentifyDocument", + "FileDataDeidentifyDocumentDataFormat", + "FileDataDeidentifyImage", + "FileDataDeidentifyImageDataFormat", + "FileDataDeidentifyPdf", + "FileDataDeidentifyPresentation", + "FileDataDeidentifyPresentationDataFormat", + "FileDataDeidentifySpreadsheet", + "FileDataDeidentifySpreadsheetDataFormat", + "FileDataDeidentifyStructuredText", + "FileDataDeidentifyStructuredTextDataFormat", + "FileDataDeidentifyText", + "FileDataReidentifyFile", + "FileDataReidentifyFileDataFormat", + "Format", + "FormatMaskedItem", + "FormatPlaintextItem", + "FormatRedactedItem", "GooglerpcStatus", + "HttpCode", + "IdentifyResponse", "InternalServerError", "NotFoundError", "ProtobufAny", @@ -185,26 +207,24 @@ "RecordServiceBulkGetRecordRequestRedaction", "RecordServiceGetRecordRequestRedaction", "RedactionEnumRedaction", - "ReidentifyFileRequestFile", - "ReidentifyFileRequestFileDataFormat", - "ReidentifyFileRequestFormat", + "ReidentifiedFileOutput", + "ReidentifiedFileOutputProcessedFileExtension", "ReidentifyFileResponse", - "ReidentifyFileResponseOutput", + "ReidentifyFileResponseOutputType", "ReidentifyFileResponseStatus", - "ReidentifyStringRequestFormat", - "ReidentifyStringResponse", "RequestActionType", "ResourceId", - "RestrictRegex", + "ShiftDates", + "ShiftDatesEntityTypesItem", "Skyflow", "SkyflowEnvironment", - "TokenType", - "TokenTypeDefault", - "TokenTypeWithoutVault", - "TokenTypeWithoutVaultDefault", + "StringResponseEntities", + "TokenTypeMapping", + "TokenTypeMappingDefault", + "TokenTypeMappingEntityOnlyItem", + "TokenTypeMappingEntityUnqCounterItem", + "TokenTypeMappingVaultTokenItem", "Transformations", - "TransformationsShiftDates", - "TransformationsShiftDatesEntityTypesItem", "UnauthorizedError", "UploadFileV2Response", "Uuid", @@ -239,7 +259,7 @@ "V1UpdateRecordResponse", "V1VaultFieldMapping", "V1VaultSchemaConfig", - "VaultId", + "WordCharacterCount", "__version__", "audit", "authentication", diff --git a/skyflow/generated/rest/audit/client.py b/skyflow/generated/rest/audit/client.py index 34d589d1..6f1d1039 100644 --- a/skyflow/generated/rest/audit/client.py +++ b/skyflow/generated/rest/audit/client.py @@ -205,7 +205,39 @@ def audit_service_list_audit_events( token="YOUR_TOKEN", ) client.audit.audit_service_list_audit_events( + filter_ops_context_change_id="filterOps.context.changeID", + filter_ops_context_request_id="filterOps.context.requestID", + filter_ops_context_trace_id="filterOps.context.traceID", + filter_ops_context_session_id="filterOps.context.sessionID", + 
filter_ops_context_actor="filterOps.context.actor", + filter_ops_context_actor_type="NONE", + filter_ops_context_access_type="ACCESS_NONE", + filter_ops_context_ip_address="filterOps.context.ipAddress", + filter_ops_context_origin="filterOps.context.origin", + filter_ops_context_auth_mode="AUTH_NONE", + filter_ops_context_jwt_id="filterOps.context.jwtID", + filter_ops_context_bearer_token_context_id="filterOps.context.bearerTokenContextID", + filter_ops_parent_account_id="filterOps.parentAccountID", filter_ops_account_id="filterOps.accountID", + filter_ops_workspace_id="filterOps.workspaceID", + filter_ops_vault_id="filterOps.vaultID", + filter_ops_resource_i_ds="filterOps.resourceIDs", + filter_ops_action_type="NONE", + filter_ops_resource_type="NONE_API", + filter_ops_tags="filterOps.tags", + filter_ops_response_code=1, + filter_ops_start_time="filterOps.startTime", + filter_ops_end_time="filterOps.endTime", + filter_ops_api_name="filterOps.apiName", + filter_ops_response_message="filterOps.responseMessage", + filter_ops_http_method="filterOps.httpMethod", + filter_ops_http_uri="filterOps.httpURI", + sort_ops_sort_by="sortOps.sortBy", + sort_ops_order_by="ASCENDING", + after_ops_timestamp="afterOps.timestamp", + after_ops_change_id="afterOps.changeID", + limit=1000000, + offset=1000000, ) """ _response = self._raw_client.audit_service_list_audit_events( @@ -431,7 +463,39 @@ async def audit_service_list_audit_events( async def main() -> None: await client.audit.audit_service_list_audit_events( + filter_ops_context_change_id="filterOps.context.changeID", + filter_ops_context_request_id="filterOps.context.requestID", + filter_ops_context_trace_id="filterOps.context.traceID", + filter_ops_context_session_id="filterOps.context.sessionID", + filter_ops_context_actor="filterOps.context.actor", + filter_ops_context_actor_type="NONE", + filter_ops_context_access_type="ACCESS_NONE", + filter_ops_context_ip_address="filterOps.context.ipAddress", + filter_ops_context_origin="filterOps.context.origin", + filter_ops_context_auth_mode="AUTH_NONE", + filter_ops_context_jwt_id="filterOps.context.jwtID", + filter_ops_context_bearer_token_context_id="filterOps.context.bearerTokenContextID", + filter_ops_parent_account_id="filterOps.parentAccountID", filter_ops_account_id="filterOps.accountID", + filter_ops_workspace_id="filterOps.workspaceID", + filter_ops_vault_id="filterOps.vaultID", + filter_ops_resource_i_ds="filterOps.resourceIDs", + filter_ops_action_type="NONE", + filter_ops_resource_type="NONE_API", + filter_ops_tags="filterOps.tags", + filter_ops_response_code=1, + filter_ops_start_time="filterOps.startTime", + filter_ops_end_time="filterOps.endTime", + filter_ops_api_name="filterOps.apiName", + filter_ops_response_message="filterOps.responseMessage", + filter_ops_http_method="filterOps.httpMethod", + filter_ops_http_uri="filterOps.httpURI", + sort_ops_sort_by="sortOps.sortBy", + sort_ops_order_by="ASCENDING", + after_ops_timestamp="afterOps.timestamp", + after_ops_change_id="afterOps.changeID", + limit=1000000, + offset=1000000, ) diff --git a/skyflow/generated/rest/client.py b/skyflow/generated/rest/client.py index e111c0b2..e8abdf54 100644 --- a/skyflow/generated/rest/client.py +++ b/skyflow/generated/rest/client.py @@ -34,7 +34,7 @@ class Skyflow: - token : typing.Union[str, typing.Callable[[], str]] + token : typing.Optional[typing.Union[str, typing.Callable[[], str]]] headers : typing.Optional[typing.Dict[str, str]] Additional headers to send with every request. 
@@ -61,7 +61,7 @@ def __init__( *, base_url: typing.Optional[str] = None, environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, - token: typing.Union[str, typing.Callable[[], str]], + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, @@ -87,9 +87,9 @@ def __init__( self.tokens = TokensClient(client_wrapper=self._client_wrapper) self.query = QueryClient(client_wrapper=self._client_wrapper) self.authentication = AuthenticationClient(client_wrapper=self._client_wrapper) - self.guardrails = GuardrailsClient(client_wrapper=self._client_wrapper) - self.strings = StringsClient(client_wrapper=self._client_wrapper) self.files = FilesClient(client_wrapper=self._client_wrapper) + self.strings = StringsClient(client_wrapper=self._client_wrapper) + self.guardrails = GuardrailsClient(client_wrapper=self._client_wrapper) class AsyncSkyflow: @@ -110,7 +110,7 @@ class AsyncSkyflow: - token : typing.Union[str, typing.Callable[[], str]] + token : typing.Optional[typing.Union[str, typing.Callable[[], str]]] headers : typing.Optional[typing.Dict[str, str]] Additional headers to send with every request. @@ -137,7 +137,7 @@ def __init__( *, base_url: typing.Optional[str] = None, environment: SkyflowEnvironment = SkyflowEnvironment.PRODUCTION, - token: typing.Union[str, typing.Callable[[], str]], + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, @@ -163,9 +163,9 @@ def __init__( self.tokens = AsyncTokensClient(client_wrapper=self._client_wrapper) self.query = AsyncQueryClient(client_wrapper=self._client_wrapper) self.authentication = AsyncAuthenticationClient(client_wrapper=self._client_wrapper) - self.guardrails = AsyncGuardrailsClient(client_wrapper=self._client_wrapper) - self.strings = AsyncStringsClient(client_wrapper=self._client_wrapper) self.files = AsyncFilesClient(client_wrapper=self._client_wrapper) + self.strings = AsyncStringsClient(client_wrapper=self._client_wrapper) + self.guardrails = AsyncGuardrailsClient(client_wrapper=self._client_wrapper) def _get_base_url(*, base_url: typing.Optional[str] = None, environment: SkyflowEnvironment) -> str: diff --git a/skyflow/generated/rest/core/client_wrapper.py b/skyflow/generated/rest/core/client_wrapper.py index a3210a7e..355d775e 100644 --- a/skyflow/generated/rest/core/client_wrapper.py +++ b/skyflow/generated/rest/core/client_wrapper.py @@ -10,7 +10,7 @@ class BaseClientWrapper: def __init__( self, *, - token: typing.Union[str, typing.Callable[[], str]], + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, @@ -22,16 +22,19 @@ def __init__( def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { + "User-Agent": "skyflow/1.16.1", "X-Fern-Language": "Python", - "X-Fern-SDK-Name": "skyflow_vault", - "X-Fern-SDK-Version": "0.0.252", + "X-Fern-SDK-Name": "skyflow", + "X-Fern-SDK-Version": "1.16.1", **(self.get_custom_headers() or {}), } - headers["Authorization"] = f"Bearer {self._get_token()}" + token = self._get_token() + if token is not None: + headers["Authorization"] = f"Bearer {token}" return headers - def _get_token(self) -> str: - if 
isinstance(self._token, str): + def _get_token(self) -> typing.Optional[str]: + if isinstance(self._token, str) or self._token is None: return self._token else: return self._token() @@ -50,7 +53,7 @@ class SyncClientWrapper(BaseClientWrapper): def __init__( self, *, - token: typing.Union[str, typing.Callable[[], str]], + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, @@ -69,7 +72,7 @@ class AsyncClientWrapper(BaseClientWrapper): def __init__( self, *, - token: typing.Union[str, typing.Callable[[], str]], + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, headers: typing.Optional[typing.Dict[str, str]] = None, base_url: str, timeout: typing.Optional[float] = None, diff --git a/skyflow/generated/rest/files/__init__.py b/skyflow/generated/rest/files/__init__.py index b1679867..f313ad67 100644 --- a/skyflow/generated/rest/files/__init__.py +++ b/skyflow/generated/rest/files/__init__.py @@ -3,49 +3,29 @@ # isort: skip_file from .types import ( - DeidentifyAudioRequestFile, - DeidentifyAudioRequestFileDataFormat, - DeidentifyAudioRequestOutputTranscription, - DeidentifyDocumentRequestFile, - DeidentifyDocumentRequestFileDataFormat, - DeidentifyFileRequestFile, - DeidentifyFileRequestFileDataFormat, - DeidentifyImageRequestFile, - DeidentifyImageRequestFileDataFormat, - DeidentifyImageRequestMaskingMethod, - DeidentifyPdfRequestFile, - DeidentifyPresentationRequestFile, - DeidentifyPresentationRequestFileDataFormat, - DeidentifySpreadsheetRequestFile, - DeidentifySpreadsheetRequestFileDataFormat, - DeidentifyStructuredTextRequestFile, - DeidentifyStructuredTextRequestFileDataFormat, - DeidentifyTextRequestFile, - ReidentifyFileRequestFile, - ReidentifyFileRequestFileDataFormat, - ReidentifyFileRequestFormat, + DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem, + DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription, + DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem, + DeidentifyFileImageRequestDeidentifyImageEntityTypesItem, + DeidentifyFileImageRequestDeidentifyImageMaskingMethod, + DeidentifyFileRequestDeidentifyDocumentEntityTypesItem, + DeidentifyFileRequestDeidentifyPresentationEntityTypesItem, + DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem, + DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem, + DeidentifyFileRequestDeidentifyTextEntityTypesItem, + DeidentifyFileRequestEntityTypesItem, ) __all__ = [ - "DeidentifyAudioRequestFile", - "DeidentifyAudioRequestFileDataFormat", - "DeidentifyAudioRequestOutputTranscription", - "DeidentifyDocumentRequestFile", - "DeidentifyDocumentRequestFileDataFormat", - "DeidentifyFileRequestFile", - "DeidentifyFileRequestFileDataFormat", - "DeidentifyImageRequestFile", - "DeidentifyImageRequestFileDataFormat", - "DeidentifyImageRequestMaskingMethod", - "DeidentifyPdfRequestFile", - "DeidentifyPresentationRequestFile", - "DeidentifyPresentationRequestFileDataFormat", - "DeidentifySpreadsheetRequestFile", - "DeidentifySpreadsheetRequestFileDataFormat", - "DeidentifyStructuredTextRequestFile", - "DeidentifyStructuredTextRequestFileDataFormat", - "DeidentifyTextRequestFile", - "ReidentifyFileRequestFile", - "ReidentifyFileRequestFileDataFormat", - "ReidentifyFileRequestFormat", + "DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem", + "DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription", + 
"DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageMaskingMethod", + "DeidentifyFileRequestDeidentifyDocumentEntityTypesItem", + "DeidentifyFileRequestDeidentifyPresentationEntityTypesItem", + "DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem", + "DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem", + "DeidentifyFileRequestDeidentifyTextEntityTypesItem", + "DeidentifyFileRequestEntityTypesItem", ] diff --git a/skyflow/generated/rest/files/client.py b/skyflow/generated/rest/files/client.py index 4d5d548b..539d2161 100644 --- a/skyflow/generated/rest/files/client.py +++ b/skyflow/generated/rest/files/client.py @@ -4,32 +4,54 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.allow_regex import AllowRegex -from ..types.configuration_id import ConfigurationId from ..types.deidentify_file_response import DeidentifyFileResponse -from ..types.deidentify_status_response import DeidentifyStatusResponse -from ..types.entity_types import EntityTypes +from ..types.detect_runs_response import DetectRunsResponse +from ..types.file_data import FileData +from ..types.file_data_deidentify_audio import FileDataDeidentifyAudio +from ..types.file_data_deidentify_document import FileDataDeidentifyDocument +from ..types.file_data_deidentify_image import FileDataDeidentifyImage +from ..types.file_data_deidentify_pdf import FileDataDeidentifyPdf +from ..types.file_data_deidentify_presentation import FileDataDeidentifyPresentation +from ..types.file_data_deidentify_spreadsheet import FileDataDeidentifySpreadsheet +from ..types.file_data_deidentify_structured_text import FileDataDeidentifyStructuredText +from ..types.file_data_deidentify_text import FileDataDeidentifyText +from ..types.file_data_reidentify_file import FileDataReidentifyFile +from ..types.format import Format from ..types.reidentify_file_response import ReidentifyFileResponse -from ..types.resource_id import ResourceId -from ..types.restrict_regex import RestrictRegex -from ..types.token_type_without_vault import TokenTypeWithoutVault +from ..types.token_type_mapping import TokenTypeMapping from ..types.transformations import Transformations -from ..types.uuid_ import Uuid -from ..types.vault_id import VaultId from .raw_client import AsyncRawFilesClient, RawFilesClient -from .types.deidentify_audio_request_file import DeidentifyAudioRequestFile -from .types.deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription -from .types.deidentify_document_request_file import DeidentifyDocumentRequestFile -from .types.deidentify_file_request_file import DeidentifyFileRequestFile -from .types.deidentify_image_request_file import DeidentifyImageRequestFile -from .types.deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod -from .types.deidentify_pdf_request_file import DeidentifyPdfRequestFile -from .types.deidentify_presentation_request_file import DeidentifyPresentationRequestFile -from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile -from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile -from .types.deidentify_text_request_file import DeidentifyTextRequestFile -from .types.reidentify_file_request_file import ReidentifyFileRequestFile -from .types.reidentify_file_request_format import 
ReidentifyFileRequestFormat +from .types.deidentify_file_audio_request_deidentify_audio_entity_types_item import ( + DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem, +) +from .types.deidentify_file_audio_request_deidentify_audio_output_transcription import ( + DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription, +) +from .types.deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item import ( + DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem, +) +from .types.deidentify_file_image_request_deidentify_image_entity_types_item import ( + DeidentifyFileImageRequestDeidentifyImageEntityTypesItem, +) +from .types.deidentify_file_image_request_deidentify_image_masking_method import ( + DeidentifyFileImageRequestDeidentifyImageMaskingMethod, +) +from .types.deidentify_file_request_deidentify_document_entity_types_item import ( + DeidentifyFileRequestDeidentifyDocumentEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_presentation_entity_types_item import ( + DeidentifyFileRequestDeidentifyPresentationEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_spreadsheet_entity_types_item import ( + DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_structured_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyTextEntityTypesItem, +) +from .types.deidentify_file_request_entity_types_item import DeidentifyFileRequestEntityTypesItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -53,14 +75,14 @@ def with_raw_response(self) -> RawFilesClient: def deidentify_file( self, *, - vault_id: VaultId, - file: DeidentifyFileRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileData, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ @@ -68,439 +90,492 @@ def deidentify_file( Parameters ---------- - vault_id : VaultId + file : FileData - file : DeidentifyFileRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. 
Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. + OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyFileRequestFile + from skyflow import FileData, Skyflow client = Skyflow( token="YOUR_TOKEN", ) client.files.deidentify_file( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyFileRequestFile( - base_64="Zm9vYmFy", - data_format="txt", + file=FileData( + base_64="base64", + data_format="mp3", ), + vault_id="vault_id", ) """ _response = self._raw_client.deidentify_file( - vault_id=vault_id, file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_document( + def deidentify_audio( self, *, - vault_id: VaultId, - file: DeidentifyDocumentRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyAudio, + vault_id: str, + output_transcription: typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] = OMIT, + output_processed_audio: typing.Optional[bool] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[int] = OMIT, + bleep_gain: typing.Optional[int] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a 
document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyAudio - file : DeidentifyDocumentRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + output_transcription : typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] + Type of transcription to output. - entity_types : typing.Optional[EntityTypes] + output_processed_audio : typing.Optional[bool] + Whether to include the processed audio file in the response. - token_type : typing.Optional[TokenTypeWithoutVault] + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. - allow_regex : typing.Optional[AllowRegex] + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. - restrict_regex : typing.Optional[RestrictRegex] + bleep_frequency : typing.Optional[int] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_gain : typing.Optional[int] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + entity_types : typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] + Entities to detect and de-identify. + + token_type : typing.Optional[TokenTypeMapping] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
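A minimal sketch of the bleep and audio-output options described above, reusing the placeholder values from the generated example that follows; the numeric settings are illustrative, not documented defaults:

from skyflow import FileDataDeidentifyAudio, Skyflow

client = Skyflow(token="YOUR_TOKEN")

response = client.files.deidentify_audio(
    file=FileDataDeidentifyAudio(base_64="base64", data_format="mp3"),
    vault_id="vault_id",
    output_processed_audio=True,  # include the bleeped audio in the response
    bleep_start_padding=0.2,      # seconds of bleep added before each entity
    bleep_stop_padding=0.2,       # seconds of bleep added after each entity
    bleep_frequency=1000,         # pitch of the bleep tone, in Hz
    bleep_gain=-3,                # relative loudness of the bleep, in dB
)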
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyDocumentRequestFile + from skyflow import FileDataDeidentifyAudio, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_document( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyDocumentRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="docx", + client.files.deidentify_audio( + file=FileDataDeidentifyAudio( + base_64="base64", + data_format="mp3", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_document( - vault_id=vault_id, + _response = self._raw_client.deidentify_audio( file=file, - configuration_id=configuration_id, + vault_id=vault_id, + output_transcription=output_transcription, + output_processed_audio=output_processed_audio, + bleep_start_padding=bleep_start_padding, + bleep_stop_padding=bleep_stop_padding, + bleep_frequency=bleep_frequency, + bleep_gain=bleep_gain, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_pdf( + def deidentify_document( self, *, - vault_id: VaultId, - file: DeidentifyPdfRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[float] = OMIT, - max_resolution: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyDocument, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyDocument - file : DeidentifyPdfRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] + Entities to detect and de-identify. - density : typing.Optional[float] - Pixel density at which to process the PDF file. + token_type : typing.Optional[TokenTypeMapping] - max_resolution : typing.Optional[float] - Max resolution at which to process the PDF file. + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - entity_types : typing.Optional[EntityTypes] - - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
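A minimal sketch of how `allow_regex` and `restrict_regex` interact under the precedence rules above; the patterns and placeholder values are illustrative:

from skyflow import FileDataDeidentifyDocument, Skyflow

client = Skyflow(token="YOUR_TOKEN")

response = client.files.deidentify_document(
    file=FileDataDeidentifyDocument(base_64="base64", data_format="pdf"),
    vault_id="vault_id",
    # A detected entity that an allow pattern matches, fully or partially,
    # stays in plaintext; allow wins when both lists match.
    allow_regex=[r"support@example\.com"],
    # A detected entity fully matched by a restrict pattern is replaced with
    # '[RESTRICTED]'; partial matches fall back to normal tokenization.
    restrict_regex=[r"\d{3}-\d{2}-\d{4}"],
)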
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyPdfRequestFile + from skyflow import FileDataDeidentifyDocument, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_pdf( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyPdfRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + client.files.deidentify_document( + file=FileDataDeidentifyDocument( + base_64="base64", + data_format="pdf", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_pdf( - vault_id=vault_id, + _response = self._raw_client.deidentify_document( file=file, - configuration_id=configuration_id, - density=density, - max_resolution=max_resolution, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_image( + def deidentify_pdf( self, *, - vault_id: VaultId, - file: DeidentifyImageRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_image: typing.Optional[bool] = OMIT, - output_ocr_text: typing.Optional[bool] = OMIT, - masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPdf, + vault_id: str, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyImageRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyPdf - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - output_processed_image : typing.Optional[bool] - If `true`, includes processed image in the output. - - output_ocr_text : typing.Optional[bool] - If `true`, includes OCR text output in the response. + density : typing.Optional[int] + Pixel density at which to process the PDF file. - masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] - Method to mask the entities in the image. + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
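A minimal sketch of the PDF-specific rendering options described above; the numbers are placeholders rather than documented defaults:

from skyflow import FileDataDeidentifyPdf, Skyflow

client = Skyflow(token="YOUR_TOKEN")

response = client.files.deidentify_pdf(
    file=FileDataDeidentifyPdf(base_64="base64"),
    vault_id="vault_id",
    density=150,          # pixel density used when processing pages
    max_resolution=2048,  # cap on the resolution used during processing
)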
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyImageRequestFile + from skyflow import FileDataDeidentifyPdf, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_image( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyImageRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="jpg", + client.files.deidentify_pdf( + file=FileDataDeidentifyPdf( + base_64="base64", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_image( - vault_id=vault_id, + _response = self._raw_client.deidentify_pdf( file=file, - configuration_id=configuration_id, - output_processed_image=output_processed_image, - output_ocr_text=output_ocr_text, - masking_method=masking_method, + vault_id=vault_id, + density=density, + max_resolution=max_resolution, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_text( + def deidentify_image( self, *, - vault_id: VaultId, - file: DeidentifyTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyImage, + vault_id: str, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a text file. This operation includes options applicable to all supported image text types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyImage - file : DeidentifyTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes text detected by OCR in the response. - entity_types : typing.Optional[EntityTypes] + masking_method : typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
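A minimal sketch of the image output flags described above, reusing the placeholder values from the generated example; `masking_method` is omitted and left at its default:

from skyflow import FileDataDeidentifyImage, Skyflow

client = Skyflow(token="YOUR_TOKEN")

response = client.files.deidentify_image(
    file=FileDataDeidentifyImage(base_64="base64", data_format="jpg"),
    vault_id="vault_id",
    output_processed_image=True,  # include the masked image in the response
    output_ocr_text=True,         # also return the text detected by OCR
)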
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyTextRequestFile + from skyflow import FileDataDeidentifyImage, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_text( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyTextRequestFile( - base_64="Zm9vYmFy", + client.files.deidentify_image( + file=FileDataDeidentifyImage( + base_64="base64", + data_format="jpg", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_text( - vault_id=vault_id, + _response = self._raw_client.deidentify_image( file=file, - configuration_id=configuration_id, + vault_id=vault_id, + output_processed_image=output_processed_image, + output_ocr_text=output_ocr_text, + masking_method=masking_method, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_structured_text( + def deidentify_presentation( self, *, - vault_id: VaultId, - file: DeidentifyStructuredTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPresentation, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPresentation - file : DeidentifyStructuredTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyStructuredTextRequestFile + from skyflow import FileDataDeidentifyPresentation, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_structured_text( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyStructuredTextRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="json", + client.files.deidentify_presentation( + file=FileDataDeidentifyPresentation( + base_64="base64", + data_format="ppt", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_structured_text( - vault_id=vault_id, + _response = self._raw_client.deidentify_presentation( file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data @@ -508,14 +583,16 @@ def deidentify_structured_text( def deidentify_spreadsheet( self, *, - vault_id: VaultId, - file: DeidentifySpreadsheetRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifySpreadsheet, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ @@ -523,319 +600,299 @@ def deidentify_spreadsheet( Parameters ---------- - vault_id : VaultId - - file : DeidentifySpreadsheetRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifySpreadsheet - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. 
If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. + OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifySpreadsheetRequestFile + from skyflow import FileDataDeidentifySpreadsheet, Skyflow client = Skyflow( token="YOUR_TOKEN", ) client.files.deidentify_spreadsheet( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifySpreadsheetRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + file=FileDataDeidentifySpreadsheet( + base_64="base64", data_format="csv", ), + vault_id="vault_id", ) """ _response = self._raw_client.deidentify_spreadsheet( - vault_id=vault_id, file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_presentation( + def deidentify_structured_text( self, *, - vault_id: VaultId, - file: DeidentifyPresentationRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyStructuredText, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyStructuredText - file : DeidentifyPresentationRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyPresentationRequestFile + from skyflow import FileDataDeidentifyStructuredText, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_presentation( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyPresentationRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="pptx", + client.files.deidentify_structured_text( + file=FileDataDeidentifyStructuredText( + base_64="base64", + data_format="json", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_presentation( - vault_id=vault_id, + _response = self._raw_client.deidentify_structured_text( file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def deidentify_audio( + def deidentify_text( self, *, - vault_id: VaultId, - file: DeidentifyAudioRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_audio: typing.Optional[bool] = OMIT, - output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, - bleep_gain: typing.Optional[float] = OMIT, - bleep_frequency: typing.Optional[float] = OMIT, - bleep_start_padding: typing.Optional[float] = OMIT, - bleep_stop_padding: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyText, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyText - file : DeidentifyAudioRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] + Entities to detect and de-identify. - output_processed_audio : typing.Optional[bool] - If `true`, includes processed audio file in the response. + token_type : typing.Optional[TokenTypeMapping] - output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] - Type of transcription to output. + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - bleep_gain : typing.Optional[float] - Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - - bleep_frequency : typing.Optional[float] - The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - - bleep_start_padding : typing.Optional[float] - Padding added to the beginning of a bleep, in seconds. - - bleep_stop_padding : typing.Optional[float] - Padding added to the end of a bleep, in seconds. - - entity_types : typing.Optional[EntityTypes] - - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
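A minimal sketch of driving de-identification from a saved Detect configuration via `configuration_id`, as noted above; the IDs are placeholders:

from skyflow import FileDataDeidentifyText, Skyflow

client = Skyflow(token="YOUR_TOKEN")

# configuration_id may only be combined with vault_id, text, and file; entity
# types, regex lists, and transformations come from the saved configuration.
response = client.files.deidentify_text(
    file=FileDataDeidentifyText(base_64="base64"),
    vault_id="vault_id",
    configuration_id="configuration_id",
)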
+ OK Examples -------- - from skyflow import Skyflow - from skyflow.files import DeidentifyAudioRequestFile + from skyflow import FileDataDeidentifyText, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.deidentify_audio( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyAudioRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="mp3", + client.files.deidentify_text( + file=FileDataDeidentifyText( + base_64="base64", ), + vault_id="vault_id", ) """ - _response = self._raw_client.deidentify_audio( - vault_id=vault_id, + _response = self._raw_client.deidentify_text( file=file, - configuration_id=configuration_id, - output_processed_audio=output_processed_audio, - output_transcription=output_transcription, - bleep_gain=bleep_gain, - bleep_frequency=bleep_frequency, - bleep_start_padding=bleep_start_padding, - bleep_stop_padding=bleep_stop_padding, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - def get_run( - self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None - ) -> DeidentifyStatusResponse: + def reidentify_file( + self, + *, + file: FileDataReidentifyFile, + vault_id: str, + format: typing.Optional[Format] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyFileResponse: """ - Returns the status of the detect run. + Re-identifies tokens in a file. Parameters ---------- - run_id : Uuid - ID of the detect run. + file : FileDataReidentifyFile - vault_id : ResourceId - ID of the vault. + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - DeidentifyStatusResponse - A successful response. + ReidentifyFileResponse + OK Examples -------- - from skyflow import Skyflow + from skyflow import FileDataReidentifyFile, Skyflow client = Skyflow( token="YOUR_TOKEN", ) - client.files.get_run( - run_id="run_id", + client.files.reidentify_file( + file=FileDataReidentifyFile( + base_64="base64", + data_format="txt", + ), vault_id="vault_id", ) """ - _response = self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) + _response = self._raw_client.reidentify_file( + file=file, vault_id=vault_id, format=format, request_options=request_options + ) return _response.data - def reidentify_file( + def get_run( self, + run_id: str, *, - vault_id: VaultId, - file: ReidentifyFileRequestFile, - format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None, - ) -> ReidentifyFileResponse: + ) -> DetectRunsResponse: """ - Re-identifies tokens in a file. + Returns the status of a detect run. Parameters ---------- - vault_id : VaultId + run_id : str - file : ReidentifyFileRequestFile - File to re-identify. Files are specified as Base64-encoded data or an EFS path. - - format : typing.Optional[ReidentifyFileRequestFormat] - Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. 
+ vault_id : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - ReidentifyFileResponse - A successful response. + DetectRunsResponse + OK Examples -------- from skyflow import Skyflow - from skyflow.files import ReidentifyFileRequestFile client = Skyflow( token="YOUR_TOKEN", ) - client.files.reidentify_file( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=ReidentifyFileRequestFile( - base_64="Zm9vYmFy", - data_format="txt", - ), + client.files.get_run( + run_id="run_id", + vault_id="vault_id", ) """ - _response = self._raw_client.reidentify_file( - vault_id=vault_id, file=file, format=format, request_options=request_options - ) + _response = self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) return _response.data @@ -857,14 +914,14 @@ def with_raw_response(self) -> AsyncRawFilesClient: async def deidentify_file( self, *, - vault_id: VaultId, - file: DeidentifyFileRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileData, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ @@ -872,37 +929,40 @@ async def deidentify_file( Parameters ---------- - vault_id : VaultId + file : FileData - file : DeidentifyFileRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. 
If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. + OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyFileRequestFile + from skyflow import AsyncSkyflow, FileData client = AsyncSkyflow( token="YOUR_TOKEN", @@ -911,78 +971,105 @@ async def deidentify_file( async def main() -> None: await client.files.deidentify_file( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyFileRequestFile( - base_64="Zm9vYmFy", - data_format="txt", + file=FileData( + base_64="base64", + data_format="mp3", ), + vault_id="vault_id", ) asyncio.run(main()) """ _response = await self._raw_client.deidentify_file( - vault_id=vault_id, file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_document( + async def deidentify_audio( self, *, - vault_id: VaultId, - file: DeidentifyDocumentRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyAudio, + vault_id: str, + output_transcription: typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] = OMIT, + output_processed_audio: typing.Optional[bool] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[int] = OMIT, + bleep_gain: typing.Optional[int] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyAudio - file : DeidentifyDocumentRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + output_transcription : typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] + Type of transcription to output. - entity_types : typing.Optional[EntityTypes] + output_processed_audio : typing.Optional[bool] + Whether to include the processed audio file in the response. - token_type : typing.Optional[TokenTypeWithoutVault] + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. - allow_regex : typing.Optional[AllowRegex] + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. - restrict_regex : typing.Optional[RestrictRegex] + bleep_frequency : typing.Optional[int] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. + + bleep_gain : typing.Optional[int] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. + + entity_types : typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] + Entities to detect and de-identify. + + token_type : typing.Optional[TokenTypeMapping] + + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyDocumentRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyAudio client = AsyncSkyflow( token="YOUR_TOKEN", @@ -990,87 +1077,88 @@ async def deidentify_document( async def main() -> None: - await client.files.deidentify_document( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyDocumentRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="docx", + await client.files.deidentify_audio( + file=FileDataDeidentifyAudio( + base_64="base64", + data_format="mp3", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_document( - vault_id=vault_id, + _response = await self._raw_client.deidentify_audio( file=file, - configuration_id=configuration_id, + vault_id=vault_id, + output_transcription=output_transcription, + output_processed_audio=output_processed_audio, + bleep_start_padding=bleep_start_padding, + bleep_stop_padding=bleep_stop_padding, + bleep_frequency=bleep_frequency, + bleep_gain=bleep_gain, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_pdf( + async def deidentify_document( self, *, - vault_id: VaultId, - file: DeidentifyPdfRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[float] = OMIT, - max_resolution: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyDocument, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyDocument - file : DeidentifyPdfRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] + Entities to detect and de-identify. - density : typing.Optional[float] - Pixel density at which to process the PDF file. + token_type : typing.Optional[TokenTypeMapping] - max_resolution : typing.Optional[float] - Max resolution at which to process the PDF file. + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - entity_types : typing.Optional[EntityTypes] - - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyPdfRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyDocument client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1078,92 +1166,92 @@ async def deidentify_pdf( async def main() -> None: - await client.files.deidentify_pdf( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyPdfRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + await client.files.deidentify_document( + file=FileDataDeidentifyDocument( + base_64="base64", + data_format="pdf", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_pdf( - vault_id=vault_id, + _response = await self._raw_client.deidentify_document( file=file, - configuration_id=configuration_id, - density=density, - max_resolution=max_resolution, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_image( + async def deidentify_pdf( self, *, - vault_id: VaultId, - file: DeidentifyImageRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_image: typing.Optional[bool] = OMIT, - output_ocr_text: typing.Optional[bool] = OMIT, - masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPdf, + vault_id: str, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyImageRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyPdf - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - output_processed_image : typing.Optional[bool] - If `true`, includes processed image in the output. - - output_ocr_text : typing.Optional[bool] - If `true`, includes OCR text output in the response. + density : typing.Optional[int] + Pixel density at which to process the PDF file. - masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] - Method to mask the entities in the image. + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyImageRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyPdf client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1171,82 +1259,95 @@ async def deidentify_image( async def main() -> None: - await client.files.deidentify_image( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyImageRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="jpg", + await client.files.deidentify_pdf( + file=FileDataDeidentifyPdf( + base_64="base64", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_image( - vault_id=vault_id, + _response = await self._raw_client.deidentify_pdf( file=file, - configuration_id=configuration_id, - output_processed_image=output_processed_image, - output_ocr_text=output_ocr_text, - masking_method=masking_method, + vault_id=vault_id, + density=density, + max_resolution=max_resolution, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_text( + async def deidentify_image( self, *, - vault_id: VaultId, - file: DeidentifyTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyImage, + vault_id: str, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a text file. This operation includes options applicable to all supported image text types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyImage - file : DeidentifyTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. + + output_ocr_text : typing.Optional[bool] + If `true`, includes text detected by OCR in the response. - entity_types : typing.Optional[EntityTypes] + masking_method : typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] + Method to mask the entities in the image. + + entity_types : typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyTextRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyImage client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1254,78 +1355,87 @@ async def deidentify_text( async def main() -> None: - await client.files.deidentify_text( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyTextRequestFile( - base_64="Zm9vYmFy", + await client.files.deidentify_image( + file=FileDataDeidentifyImage( + base_64="base64", + data_format="jpg", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_text( - vault_id=vault_id, + _response = await self._raw_client.deidentify_image( file=file, - configuration_id=configuration_id, + vault_id=vault_id, + output_processed_image=output_processed_image, + output_ocr_text=output_ocr_text, + masking_method=masking_method, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_structured_text( + async def deidentify_presentation( self, *, - vault_id: VaultId, - file: DeidentifyStructuredTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPresentation, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPresentation - file : DeidentifyStructuredTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyStructuredTextRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyPresentation client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1333,26 +1443,26 @@ async def deidentify_structured_text( async def main() -> None: - await client.files.deidentify_structured_text( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyStructuredTextRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="json", + await client.files.deidentify_presentation( + file=FileDataDeidentifyPresentation( + base_64="base64", + data_format="ppt", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_structured_text( - vault_id=vault_id, + _response = await self._raw_client.deidentify_presentation( file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data @@ -1360,14 +1470,16 @@ async def main() -> None: async def deidentify_spreadsheet( self, *, - vault_id: VaultId, - file: DeidentifySpreadsheetRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifySpreadsheet, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ @@ -1375,37 +1487,40 @@ async def deidentify_spreadsheet( Parameters ---------- - vault_id : VaultId - - file : DeidentifySpreadsheetRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifySpreadsheet - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. 
Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. + OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifySpreadsheetRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifySpreadsheet client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1414,78 +1529,83 @@ async def deidentify_spreadsheet( async def main() -> None: await client.files.deidentify_spreadsheet( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifySpreadsheetRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", + file=FileDataDeidentifySpreadsheet( + base_64="base64", data_format="csv", ), + vault_id="vault_id", ) asyncio.run(main()) """ _response = await self._raw_client.deidentify_spreadsheet( - vault_id=vault_id, file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_presentation( + async def deidentify_structured_text( self, *, - vault_id: VaultId, - file: DeidentifyPresentationRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyStructuredText, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyStructuredText - file : DeidentifyPresentationRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyPresentationRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyStructuredText client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1493,103 +1613,82 @@ async def deidentify_presentation( async def main() -> None: - await client.files.deidentify_presentation( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyPresentationRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="pptx", + await client.files.deidentify_structured_text( + file=FileDataDeidentifyStructuredText( + base_64="base64", + data_format="json", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_presentation( - vault_id=vault_id, + _response = await self._raw_client.deidentify_structured_text( file=file, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def deidentify_audio( + async def deidentify_text( self, *, - vault_id: VaultId, - file: DeidentifyAudioRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_audio: typing.Optional[bool] = OMIT, - output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, - bleep_gain: typing.Optional[float] = OMIT, - bleep_frequency: typing.Optional[float] = OMIT, - bleep_start_padding: typing.Optional[float] = OMIT, - bleep_stop_padding: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyText, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyFileResponse: """ - De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyText - file : DeidentifyAudioRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] + Entities to detect and de-identify. - output_processed_audio : typing.Optional[bool] - If `true`, includes processed audio file in the response. + token_type : typing.Optional[TokenTypeMapping] - output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] - Type of transcription to output. + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - bleep_gain : typing.Optional[float] - Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - - bleep_frequency : typing.Optional[float] - The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - - bleep_start_padding : typing.Optional[float] - Padding added to the beginning of a bleep, in seconds. - - bleep_stop_padding : typing.Optional[float] - Padding added to the end of a bleep, in seconds. - - entity_types : typing.Optional[EntityTypes] - - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyFileResponse - A successful response. 
+ OK Examples -------- import asyncio - from skyflow import AsyncSkyflow - from skyflow.files import DeidentifyAudioRequestFile + from skyflow import AsyncSkyflow, FileDataDeidentifyText client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1597,63 +1696,62 @@ async def deidentify_audio( async def main() -> None: - await client.files.deidentify_audio( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=DeidentifyAudioRequestFile( - base_64="SGkgaSBhbSBEZXZhbnNodSwgbGl2...aW5nIGluIGNhbGlmb3JuaWEuIA==", - data_format="mp3", + await client.files.deidentify_text( + file=FileDataDeidentifyText( + base_64="base64", ), + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.deidentify_audio( - vault_id=vault_id, + _response = await self._raw_client.deidentify_text( file=file, - configuration_id=configuration_id, - output_processed_audio=output_processed_audio, - output_transcription=output_transcription, - bleep_gain=bleep_gain, - bleep_frequency=bleep_frequency, - bleep_start_padding=bleep_start_padding, - bleep_stop_padding=bleep_stop_padding, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data - async def get_run( - self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None - ) -> DeidentifyStatusResponse: + async def reidentify_file( + self, + *, + file: FileDataReidentifyFile, + vault_id: str, + format: typing.Optional[Format] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> ReidentifyFileResponse: """ - Returns the status of the detect run. + Re-identifies tokens in a file. Parameters ---------- - run_id : Uuid - ID of the detect run. + file : FileDataReidentifyFile - vault_id : ResourceId - ID of the vault. + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - DeidentifyStatusResponse - A successful response. + ReidentifyFileResponse + OK Examples -------- import asyncio - from skyflow import AsyncSkyflow + from skyflow import AsyncSkyflow, FileDataReidentifyFile client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1661,52 +1759,51 @@ async def get_run( async def main() -> None: - await client.files.get_run( - run_id="run_id", + await client.files.reidentify_file( + file=FileDataReidentifyFile( + base_64="base64", + data_format="txt", + ), vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) + _response = await self._raw_client.reidentify_file( + file=file, vault_id=vault_id, format=format, request_options=request_options + ) return _response.data - async def reidentify_file( + async def get_run( self, + run_id: str, *, - vault_id: VaultId, - file: ReidentifyFileRequestFile, - format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None, - ) -> ReidentifyFileResponse: + ) -> DetectRunsResponse: """ - Re-identifies tokens in a file. + Returns the status of a detect run. Parameters ---------- - vault_id : VaultId + run_id : str - file : ReidentifyFileRequestFile - File to re-identify. Files are specified as Base64-encoded data or an EFS path. 
- - format : typing.Optional[ReidentifyFileRequestFormat] - Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + vault_id : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - ReidentifyFileResponse - A successful response. + DetectRunsResponse + OK Examples -------- import asyncio from skyflow import AsyncSkyflow - from skyflow.files import ReidentifyFileRequestFile client = AsyncSkyflow( token="YOUR_TOKEN", @@ -1714,18 +1811,13 @@ async def reidentify_file( async def main() -> None: - await client.files.reidentify_file( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - file=ReidentifyFileRequestFile( - base_64="Zm9vYmFy", - data_format="txt", - ), + await client.files.get_run( + run_id="run_id", + vault_id="vault_id", ) asyncio.run(main()) """ - _response = await self._raw_client.reidentify_file( - vault_id=vault_id, file=file, format=format, request_options=request_options - ) + _response = await self._raw_client.get_run(run_id, vault_id=vault_id, request_options=request_options) return _response.data diff --git a/skyflow/generated/rest/files/raw_client.py b/skyflow/generated/rest/files/raw_client.py index c0e535ea..863ae5e3 100644 --- a/skyflow/generated/rest/files/raw_client.py +++ b/skyflow/generated/rest/files/raw_client.py @@ -12,34 +12,55 @@ from ..core.serialization import convert_and_respect_annotation_metadata from ..errors.bad_request_error import BadRequestError from ..errors.internal_server_error import InternalServerError -from ..errors.not_found_error import NotFoundError from ..errors.unauthorized_error import UnauthorizedError -from ..types.allow_regex import AllowRegex -from ..types.configuration_id import ConfigurationId from ..types.deidentify_file_response import DeidentifyFileResponse -from ..types.deidentify_status_response import DeidentifyStatusResponse -from ..types.entity_types import EntityTypes +from ..types.detect_runs_response import DetectRunsResponse from ..types.error_response import ErrorResponse +from ..types.file_data import FileData +from ..types.file_data_deidentify_audio import FileDataDeidentifyAudio +from ..types.file_data_deidentify_document import FileDataDeidentifyDocument +from ..types.file_data_deidentify_image import FileDataDeidentifyImage +from ..types.file_data_deidentify_pdf import FileDataDeidentifyPdf +from ..types.file_data_deidentify_presentation import FileDataDeidentifyPresentation +from ..types.file_data_deidentify_spreadsheet import FileDataDeidentifySpreadsheet +from ..types.file_data_deidentify_structured_text import FileDataDeidentifyStructuredText +from ..types.file_data_deidentify_text import FileDataDeidentifyText +from ..types.file_data_reidentify_file import FileDataReidentifyFile +from ..types.format import Format from ..types.reidentify_file_response import ReidentifyFileResponse -from ..types.resource_id import ResourceId -from ..types.restrict_regex import RestrictRegex -from ..types.token_type_without_vault import TokenTypeWithoutVault +from ..types.token_type_mapping import TokenTypeMapping from ..types.transformations import Transformations -from ..types.uuid_ import Uuid -from ..types.vault_id import VaultId -from .types.deidentify_audio_request_file import DeidentifyAudioRequestFile -from .types.deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription 
-from .types.deidentify_document_request_file import DeidentifyDocumentRequestFile -from .types.deidentify_file_request_file import DeidentifyFileRequestFile -from .types.deidentify_image_request_file import DeidentifyImageRequestFile -from .types.deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod -from .types.deidentify_pdf_request_file import DeidentifyPdfRequestFile -from .types.deidentify_presentation_request_file import DeidentifyPresentationRequestFile -from .types.deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile -from .types.deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile -from .types.deidentify_text_request_file import DeidentifyTextRequestFile -from .types.reidentify_file_request_file import ReidentifyFileRequestFile -from .types.reidentify_file_request_format import ReidentifyFileRequestFormat +from .types.deidentify_file_audio_request_deidentify_audio_entity_types_item import ( + DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem, +) +from .types.deidentify_file_audio_request_deidentify_audio_output_transcription import ( + DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription, +) +from .types.deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item import ( + DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem, +) +from .types.deidentify_file_image_request_deidentify_image_entity_types_item import ( + DeidentifyFileImageRequestDeidentifyImageEntityTypesItem, +) +from .types.deidentify_file_image_request_deidentify_image_masking_method import ( + DeidentifyFileImageRequestDeidentifyImageMaskingMethod, +) +from .types.deidentify_file_request_deidentify_document_entity_types_item import ( + DeidentifyFileRequestDeidentifyDocumentEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_presentation_entity_types_item import ( + DeidentifyFileRequestDeidentifyPresentationEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_spreadsheet_entity_types_item import ( + DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_structured_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem, +) +from .types.deidentify_file_request_deidentify_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyTextEntityTypesItem, +) +from .types.deidentify_file_request_entity_types_item import DeidentifyFileRequestEntityTypesItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -52,14 +73,14 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def deidentify_file( self, *, - vault_id: VaultId, - file: DeidentifyFileRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileData, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ @@ -67,49 +88,51 @@ def deidentify_file( Parameters ---------- - vault_id : VaultId + file : FileData - file : DeidentifyFileRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( "v1/detect/deidentify/file", method="POST", json={ + "file": convert_and_respect_annotation_metadata(object_=file, annotation=FileData, direction="write"), "vault_id": vault_id, - "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyFileRequestFile, direction="write" - ), - "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -165,67 +188,101 @@ def deidentify_file( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_document( + def deidentify_audio( self, *, - vault_id: VaultId, - file: DeidentifyDocumentRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyAudio, + vault_id: str, + output_transcription: typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] = OMIT, + output_processed_audio: typing.Optional[bool] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[int] = OMIT, + bleep_gain: typing.Optional[int] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyAudio + + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - file : DeidentifyDocumentRequestFile - File to de-identify. Files are specified as Base64-encoded data. + output_transcription : typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] + Type of transcription to output. + + output_processed_audio : typing.Optional[bool] + Whether to include the processed audio file in the response. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. - configuration_id : typing.Optional[ConfigurationId] + bleep_frequency : typing.Optional[int] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - entity_types : typing.Optional[EntityTypes] + bleep_gain : typing.Optional[int] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - token_type : typing.Optional[TokenTypeWithoutVault] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] + Entities to detect and de-identify. - allow_regex : typing.Optional[AllowRegex] + token_type : typing.Optional[TokenTypeMapping] - restrict_regex : typing.Optional[RestrictRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/document", + "v1/detect/deidentify/file/audio", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyAudio, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, + "output_transcription": output_transcription, + "output_processed_audio": output_processed_audio, + "bleep_start_padding": bleep_start_padding, + "bleep_stop_padding": bleep_stop_padding, + "bleep_frequency": bleep_frequency, + "bleep_gain": bleep_gain, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -281,77 +338,71 @@ def deidentify_document( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_pdf( + def deidentify_document( self, *, - vault_id: VaultId, - file: DeidentifyPdfRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[float] = OMIT, - max_resolution: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyDocument, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific operations (like De-identify PDF) where they're available. For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyPdfRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyDocument - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - density : typing.Optional[float] - Pixel density at which to process the PDF file. + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] + Entities to detect and de-identify. - max_resolution : typing.Optional[float] - Max resolution at which to process the PDF file. + token_type : typing.Optional[TokenTypeMapping] - entity_types : typing.Optional[EntityTypes] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response.
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/document/pdf", + "v1/detect/deidentify/file/document", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyPdfRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyDocument, direction="write" ), - "configuration_id": configuration_id, - "density": density, - "max_resolution": max_resolution, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -407,82 +458,83 @@ def deidentify_pdf( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_image( + def deidentify_pdf( self, *, - vault_id: VaultId, - file: DeidentifyImageRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_image: typing.Optional[bool] = OMIT, - output_ocr_text: typing.Optional[bool] = OMIT, - masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPdf, + vault_id: str, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPdf - file : DeidentifyImageRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] - - output_processed_image : typing.Optional[bool] - If `true`, includes processed image in the output. - - output_ocr_text : typing.Optional[bool] - If `true`, includes OCR text output in the response. + density : typing.Optional[int] + Pixel density at which to process the PDF file. - masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] - Method to mask the entities in the image. + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/image", + "v1/detect/deidentify/file/document/pdf", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyImageRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyPdf, direction="write" ), - "configuration_id": configuration_id, - "output_processed_image": output_processed_image, - "output_ocr_text": output_ocr_text, - "masking_method": masking_method, + "vault_id": vault_id, + "density": density, + "max_resolution": max_resolution, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -538,67 +590,86 @@ def deidentify_image( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_text( + def deidentify_image( self, *, - vault_id: VaultId, - file: DeidentifyTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyImage, + vault_id: str, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a text file. This operation includes options applicable to all supported image text types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyImage + + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. - file : DeidentifyTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + output_ocr_text : typing.Optional[bool] + If `true`, includes text detected by OCR in the response. - configuration_id : typing.Optional[ConfigurationId] + masking_method : typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] + Method to mask the entities in the image. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/text", + "v1/detect/deidentify/file/image", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyTextRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyImage, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, + "output_processed_image": output_processed_image, + "output_ocr_text": output_ocr_text, + "masking_method": masking_method, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -654,67 +725,73 @@ def deidentify_text( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_structured_text( + def deidentify_presentation( self, *, - vault_id: VaultId, - file: DeidentifyStructuredTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPresentation, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPresentation - file : DeidentifyStructuredTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/structured_text", + "v1/detect/deidentify/file/presentation", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyPresentation, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -773,14 +850,16 @@ def deidentify_structured_text( def deidentify_spreadsheet( self, *, - vault_id: VaultId, - file: DeidentifySpreadsheetRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifySpreadsheet, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ @@ -788,49 +867,53 @@ def deidentify_spreadsheet( Parameters ---------- - vault_id : VaultId - - file : DeidentifySpreadsheetRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifySpreadsheet - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. 
If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. + OK """ _response = self._client_wrapper.httpx_client.request( "v1/detect/deidentify/file/spreadsheet", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifySpreadsheet, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -886,67 +969,73 @@ def deidentify_spreadsheet( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_presentation( + def deidentify_structured_text( self, *, - vault_id: VaultId, - file: DeidentifyPresentationRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyStructuredText, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyPresentationRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyStructuredText - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/presentation", + "v1/detect/deidentify/file/structured_text", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyStructuredText, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1002,97 +1091,71 @@ def deidentify_presentation( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def deidentify_audio( + def deidentify_text( self, *, - vault_id: VaultId, - file: DeidentifyAudioRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_audio: typing.Optional[bool] = OMIT, - output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, - bleep_gain: typing.Optional[float] = OMIT, - bleep_frequency: typing.Optional[float] = OMIT, - bleep_start_padding: typing.Optional[float] = OMIT, - bleep_stop_padding: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyText, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyAudioRequestFile - File to de-identify. Files are specified as Base64-encoded data. - - configuration_id : typing.Optional[ConfigurationId] - - output_processed_audio : typing.Optional[bool] - If `true`, includes processed audio file in the response. - - output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] - Type of transcription to output. - - bleep_gain : typing.Optional[float] - Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - - bleep_frequency : typing.Optional[float] - The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - - bleep_start_padding : typing.Optional[float] - Padding added to the beginning of a bleep, in seconds. + file : FileDataDeidentifyText - bleep_stop_padding : typing.Optional[float] - Padding added to the end of a bleep, in seconds. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/audio", + "v1/detect/deidentify/file/text", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyAudioRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyText, direction="write" ), - "configuration_id": configuration_id, - "output_processed_audio": output_processed_audio, - "output_transcription": output_transcription, - "bleep_gain": bleep_gain, - "bleep_frequency": bleep_frequency, - "bleep_start_padding": bleep_start_padding, - "bleep_stop_padding": bleep_stop_padding, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1148,42 +1211,56 @@ def deidentify_audio( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def get_run( - self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[DeidentifyStatusResponse]: + def reidentify_file( + self, + *, + file: FileDataReidentifyFile, + vault_id: str, + format: typing.Optional[Format] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ReidentifyFileResponse]: """ - Returns the status of the detect run. + Re-identifies tokens in a file. Parameters ---------- - run_id : Uuid - ID of the detect run. + file : FileDataReidentifyFile - vault_id : ResourceId - ID of the vault. + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - HttpResponse[DeidentifyStatusResponse] - A successful response. 
+ HttpResponse[ReidentifyFileResponse] + OK """ _response = self._client_wrapper.httpx_client.request( - f"v1/detect/runs/{jsonable_encoder(run_id)}", - method="GET", - params={ + "v1/detect/reidentify/file", + method="POST", + json={ + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=FileDataReidentifyFile, direction="write" + ), "vault_id": vault_id, + "format": convert_and_respect_annotation_metadata(object_=format, annotation=Format, direction="write"), + }, + headers={ + "content-type": "application/json", }, request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: _data = typing.cast( - DeidentifyStatusResponse, + ReidentifyFileResponse, parse_obj_as( - type_=DeidentifyStatusResponse, # type: ignore + type_=ReidentifyFileResponse, # type: ignore object_=_response.json(), ), ) @@ -1210,17 +1287,6 @@ def get_run( ), ), ) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) if _response.status_code == 500: raise InternalServerError( headers=dict(_response.headers), @@ -1237,59 +1303,44 @@ def get_run( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - def reidentify_file( + def get_run( self, + run_id: str, *, - vault_id: VaultId, - file: ReidentifyFileRequestFile, - format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ReidentifyFileResponse]: + ) -> HttpResponse[DetectRunsResponse]: """ - Re-identifies tokens in a file. + Returns the status of a detect run. Parameters ---------- - vault_id : VaultId - - file : ReidentifyFileRequestFile - File to re-identify. Files are specified as Base64-encoded data or an EFS path. + run_id : str - format : typing.Optional[ReidentifyFileRequestFormat] - Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + vault_id : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - HttpResponse[ReidentifyFileResponse] - A successful response. 
+ HttpResponse[DetectRunsResponse] + OK """ _response = self._client_wrapper.httpx_client.request( - "v1/detect/reidentify/file", - method="POST", - json={ + f"v1/detect/runs/{jsonable_encoder(run_id)}", + method="GET", + params={ "vault_id": vault_id, - "file": convert_and_respect_annotation_metadata( - object_=file, annotation=ReidentifyFileRequestFile, direction="write" - ), - "format": convert_and_respect_annotation_metadata( - object_=format, annotation=ReidentifyFileRequestFormat, direction="write" - ), - }, - headers={ - "content-type": "application/json", }, request_options=request_options, - omit=OMIT, ) try: if 200 <= _response.status_code < 300: _data = typing.cast( - ReidentifyFileResponse, + DetectRunsResponse, parse_obj_as( - type_=ReidentifyFileResponse, # type: ignore + type_=DetectRunsResponse, # type: ignore object_=_response.json(), ), ) @@ -1340,14 +1391,14 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def deidentify_file( self, *, - vault_id: VaultId, - file: DeidentifyFileRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileData, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ @@ -1355,49 +1406,51 @@ async def deidentify_file( Parameters ---------- - vault_id : VaultId + file : FileData - file : DeidentifyFileRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. 
If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. + OK """ _response = await self._client_wrapper.httpx_client.request( "v1/detect/deidentify/file", method="POST", json={ + "file": convert_and_respect_annotation_metadata(object_=file, annotation=FileData, direction="write"), "vault_id": vault_id, - "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyFileRequestFile, direction="write" - ), - "configuration_id": configuration_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1453,67 +1506,101 @@ async def deidentify_file( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_document( + async def deidentify_audio( self, *, - vault_id: VaultId, - file: DeidentifyDocumentRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyAudio, + vault_id: str, + output_transcription: typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] = OMIT, + output_processed_audio: typing.Optional[bool] = OMIT, + bleep_start_padding: typing.Optional[float] = OMIT, + bleep_stop_padding: typing.Optional[float] = OMIT, + bleep_frequency: typing.Optional[int] = OMIT, + bleep_gain: typing.Optional[int] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific opertions (like De-identify PDF) where they're available. For broader file type support, see De-identify File. + De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyAudio + + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - file : DeidentifyDocumentRequestFile - File to de-identify. Files are specified as Base64-encoded data. + output_transcription : typing.Optional[DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription] + Type of transcription to output. + + output_processed_audio : typing.Optional[bool] + Whether to include the processed audio file in the response. + + bleep_start_padding : typing.Optional[float] + Padding added to the beginning of a bleep, in seconds. + + bleep_stop_padding : typing.Optional[float] + Padding added to the end of a bleep, in seconds. - configuration_id : typing.Optional[ConfigurationId] + bleep_frequency : typing.Optional[int] + The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - entity_types : typing.Optional[EntityTypes] + bleep_gain : typing.Optional[int] + Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - token_type : typing.Optional[TokenTypeWithoutVault] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem]] + Entities to detect and de-identify. - allow_regex : typing.Optional[AllowRegex] + token_type : typing.Optional[TokenTypeMapping] - restrict_regex : typing.Optional[RestrictRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. + + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/document", + "v1/detect/deidentify/file/audio", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyDocumentRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyAudio, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, + "output_transcription": output_transcription, + "output_processed_audio": output_processed_audio, + "bleep_start_padding": bleep_start_padding, + "bleep_stop_padding": bleep_stop_padding, + "bleep_frequency": bleep_frequency, + "bleep_gain": bleep_gain, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1569,77 +1656,71 @@ async def deidentify_document( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_pdf( + async def deidentify_document( self, *, - vault_id: VaultId, - file: DeidentifyPdfRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - density: typing.Optional[float] = OMIT, - max_resolution: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyDocument, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. + De-identifies sensitive data from a document file. This operation includes options applicable to all supported document file types.

          For more specific options, see the file type-specific operations (like De-identify PDF) where they're available. For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyPdfRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyDocument - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - density : typing.Optional[float] - Pixel density at which to process the PDF file. + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyDocumentEntityTypesItem]] + Entities to detect and de-identify. - max_resolution : typing.Optional[float] - Max resolution at which to process the PDF file. + token_type : typing.Optional[TokenTypeMapping] - entity_types : typing.Optional[EntityTypes] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - token_type : typing.Optional[TokenTypeWithoutVault] - - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response.
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/document/pdf", + "v1/detect/deidentify/file/document", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyPdfRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyDocument, direction="write" ), - "configuration_id": configuration_id, - "density": density, - "max_resolution": max_resolution, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1695,82 +1776,83 @@ async def deidentify_pdf( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_image( + async def deidentify_pdf( self, *, - vault_id: VaultId, - file: DeidentifyImageRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_image: typing.Optional[bool] = OMIT, - output_ocr_text: typing.Optional[bool] = OMIT, - masking_method: typing.Optional[DeidentifyImageRequestMaskingMethod] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPdf, + vault_id: str, + density: typing.Optional[int] = OMIT, + max_resolution: typing.Optional[int] = OMIT, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a PDF file. This operation includes options specific to PDF files.

          For broader file type support, see De-identify Document and De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPdf - file : DeidentifyImageRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] - - output_processed_image : typing.Optional[bool] - If `true`, includes processed image in the output. - - output_ocr_text : typing.Optional[bool] - If `true`, includes OCR text output in the response. + density : typing.Optional[int] + Pixel density at which to process the PDF file. - masking_method : typing.Optional[DeidentifyImageRequestMaskingMethod] - Method to mask the entities in the image. + max_resolution : typing.Optional[int] + Max resolution at which to process the PDF file. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/image", + "v1/detect/deidentify/file/document/pdf", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyImageRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyPdf, direction="write" ), - "configuration_id": configuration_id, - "output_processed_image": output_processed_image, - "output_ocr_text": output_ocr_text, - "masking_method": masking_method, + "vault_id": vault_id, + "density": density, + "max_resolution": max_resolution, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1826,67 +1908,86 @@ async def deidentify_image( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_text( + async def deidentify_image( self, *, - vault_id: VaultId, - file: DeidentifyTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyImage, + vault_id: str, + output_processed_image: typing.Optional[bool] = OMIT, + output_ocr_text: typing.Optional[bool] = OMIT, + masking_method: typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] = OMIT, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a text file. This operation includes options applicable to all supported image text types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from an image file. This operation includes options applicable to all supported image file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyImage + + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. + + output_processed_image : typing.Optional[bool] + If `true`, includes processed image in the output. - file : DeidentifyTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + output_ocr_text : typing.Optional[bool] + If `true`, includes text detected by OCR in the response. - configuration_id : typing.Optional[ConfigurationId] + masking_method : typing.Optional[DeidentifyFileImageRequestDeidentifyImageMaskingMethod] + Method to mask the entities in the image. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileImageRequestDeidentifyImageEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/text", + "v1/detect/deidentify/file/image", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyTextRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyImage, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, + "output_processed_image": output_processed_image, + "output_ocr_text": output_ocr_text, + "masking_method": masking_method, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -1942,67 +2043,73 @@ async def deidentify_text( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_structured_text( + async def deidentify_presentation( self, *, - vault_id: VaultId, - file: DeidentifyStructuredTextRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyPresentation, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId + file : FileDataDeidentifyPresentation - file : DeidentifyStructuredTextRequestFile - File to de-identify. Files are specified as Base64-encoded data. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - configuration_id : typing.Optional[ConfigurationId] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyPresentationEntityTypesItem]] + Entities to detect and de-identify. - entity_types : typing.Optional[EntityTypes] + token_type : typing.Optional[TokenTypeMapping] - token_type : typing.Optional[TokenTypeWithoutVault] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - allow_regex : typing.Optional[AllowRegex] - - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/structured_text", + "v1/detect/deidentify/file/presentation", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyStructuredTextRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyPresentation, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -2061,14 +2168,16 @@ async def deidentify_structured_text( async def deidentify_spreadsheet( self, *, - vault_id: VaultId, - file: DeidentifySpreadsheetRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifySpreadsheet, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ @@ -2076,49 +2185,53 @@ async def deidentify_spreadsheet( Parameters ---------- - vault_id : VaultId - - file : DeidentifySpreadsheetRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifySpreadsheet - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. 
If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. + OK """ _response = await self._client_wrapper.httpx_client.request( "v1/detect/deidentify/file/spreadsheet", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifySpreadsheetRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifySpreadsheet, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -2174,67 +2287,73 @@ async def deidentify_spreadsheet( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_presentation( + async def deidentify_structured_text( self, *, - vault_id: VaultId, - file: DeidentifyPresentationRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyStructuredText, + vault_id: str, + entity_types: typing.Optional[ + typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem] + ] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from a presentation file. This operation includes options applicable to all supported presentation file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a structured text file. This operation includes options applicable to all supported structured text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyPresentationRequestFile - File to de-identify. Files are specified as Base64-encoded data. + file : FileDataDeidentifyStructuredText - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/presentation", + "v1/detect/deidentify/file/structured_text", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyPresentationRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyStructuredText, direction="write" ), - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -2290,97 +2409,71 @@ async def deidentify_presentation( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def deidentify_audio( + async def deidentify_text( self, *, - vault_id: VaultId, - file: DeidentifyAudioRequestFile, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - output_processed_audio: typing.Optional[bool] = OMIT, - output_transcription: typing.Optional[DeidentifyAudioRequestOutputTranscription] = OMIT, - bleep_gain: typing.Optional[float] = OMIT, - bleep_frequency: typing.Optional[float] = OMIT, - bleep_start_padding: typing.Optional[float] = OMIT, - bleep_stop_padding: typing.Optional[float] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenTypeWithoutVault] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + file: FileDataDeidentifyText, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyFileResponse]: """ - De-identifies sensitive data from an audio file. This operation includes options applicable to all supported audio file types.

          For broader file type support, see De-identify File. + De-identifies sensitive data from a text file. This operation includes options applicable to all supported text file types.

          For broader file type support, see De-identify File. Parameters ---------- - vault_id : VaultId - - file : DeidentifyAudioRequestFile - File to de-identify. Files are specified as Base64-encoded data. - - configuration_id : typing.Optional[ConfigurationId] - - output_processed_audio : typing.Optional[bool] - If `true`, includes processed audio file in the response. - - output_transcription : typing.Optional[DeidentifyAudioRequestOutputTranscription] - Type of transcription to output. - - bleep_gain : typing.Optional[float] - Relative loudness of the bleep in dB. Positive values increase its loudness, and negative values decrease it. - - bleep_frequency : typing.Optional[float] - The pitch of the bleep sound, in Hz. The higher the number, the higher the pitch. - - bleep_start_padding : typing.Optional[float] - Padding added to the beginning of a bleep, in seconds. + file : FileDataDeidentifyText - bleep_stop_padding : typing.Optional[float] - Padding added to the end of a bleep, in seconds. + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyFileRequestDeidentifyTextEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenTypeWithoutVault] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyFileResponse] - A successful response. 
+ OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/deidentify/file/audio", + "v1/detect/deidentify/file/text", method="POST", json={ - "vault_id": vault_id, "file": convert_and_respect_annotation_metadata( - object_=file, annotation=DeidentifyAudioRequestFile, direction="write" + object_=file, annotation=FileDataDeidentifyText, direction="write" ), - "configuration_id": configuration_id, - "output_processed_audio": output_processed_audio, - "output_transcription": output_transcription, - "bleep_gain": bleep_gain, - "bleep_frequency": bleep_frequency, - "bleep_start_padding": bleep_start_padding, - "bleep_stop_padding": bleep_stop_padding, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenTypeWithoutVault, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -2436,42 +2529,56 @@ async def deidentify_audio( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def get_run( - self, run_id: Uuid, *, vault_id: ResourceId, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[DeidentifyStatusResponse]: + async def reidentify_file( + self, + *, + file: FileDataReidentifyFile, + vault_id: str, + format: typing.Optional[Format] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ReidentifyFileResponse]: """ - Returns the status of the detect run. + Re-identifies tokens in a file. Parameters ---------- - run_id : Uuid - ID of the detect run. + file : FileDataReidentifyFile - vault_id : ResourceId - ID of the vault. + vault_id : str + ID of the vault where the entities are stored. + + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - AsyncHttpResponse[DeidentifyStatusResponse] - A successful response. 
+ AsyncHttpResponse[ReidentifyFileResponse] + OK """ _response = await self._client_wrapper.httpx_client.request( - f"v1/detect/runs/{jsonable_encoder(run_id)}", - method="GET", - params={ + "v1/detect/reidentify/file", + method="POST", + json={ + "file": convert_and_respect_annotation_metadata( + object_=file, annotation=FileDataReidentifyFile, direction="write" + ), "vault_id": vault_id, + "format": convert_and_respect_annotation_metadata(object_=format, annotation=Format, direction="write"), + }, + headers={ + "content-type": "application/json", }, request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: _data = typing.cast( - DeidentifyStatusResponse, + ReidentifyFileResponse, parse_obj_as( - type_=DeidentifyStatusResponse, # type: ignore + type_=ReidentifyFileResponse, # type: ignore object_=_response.json(), ), ) @@ -2498,17 +2605,6 @@ async def get_run( ), ), ) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) if _response.status_code == 500: raise InternalServerError( headers=dict(_response.headers), @@ -2525,59 +2621,44 @@ async def get_run( raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - async def reidentify_file( + async def get_run( self, + run_id: str, *, - vault_id: VaultId, - file: ReidentifyFileRequestFile, - format: typing.Optional[ReidentifyFileRequestFormat] = OMIT, + vault_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ReidentifyFileResponse]: + ) -> AsyncHttpResponse[DetectRunsResponse]: """ - Re-identifies tokens in a file. + Returns the status of a detect run. Parameters ---------- - vault_id : VaultId - - file : ReidentifyFileRequestFile - File to re-identify. Files are specified as Base64-encoded data or an EFS path. + run_id : str - format : typing.Optional[ReidentifyFileRequestFormat] - Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + vault_id : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - AsyncHttpResponse[ReidentifyFileResponse] - A successful response. 
+ AsyncHttpResponse[DetectRunsResponse] + OK """ _response = await self._client_wrapper.httpx_client.request( - "v1/detect/reidentify/file", - method="POST", - json={ + f"v1/detect/runs/{jsonable_encoder(run_id)}", + method="GET", + params={ "vault_id": vault_id, - "file": convert_and_respect_annotation_metadata( - object_=file, annotation=ReidentifyFileRequestFile, direction="write" - ), - "format": convert_and_respect_annotation_metadata( - object_=format, annotation=ReidentifyFileRequestFormat, direction="write" - ), - }, - headers={ - "content-type": "application/json", }, request_options=request_options, - omit=OMIT, ) try: if 200 <= _response.status_code < 300: _data = typing.cast( - ReidentifyFileResponse, + DetectRunsResponse, parse_obj_as( - type_=ReidentifyFileResponse, # type: ignore + type_=DetectRunsResponse, # type: ignore object_=_response.json(), ), ) diff --git a/skyflow/generated/rest/files/types/__init__.py b/skyflow/generated/rest/files/types/__init__.py index 78943a33..5ff54d96 100644 --- a/skyflow/generated/rest/files/types/__init__.py +++ b/skyflow/generated/rest/files/types/__init__.py @@ -2,48 +2,48 @@ # isort: skip_file -from .deidentify_audio_request_file import DeidentifyAudioRequestFile -from .deidentify_audio_request_file_data_format import DeidentifyAudioRequestFileDataFormat -from .deidentify_audio_request_output_transcription import DeidentifyAudioRequestOutputTranscription -from .deidentify_document_request_file import DeidentifyDocumentRequestFile -from .deidentify_document_request_file_data_format import DeidentifyDocumentRequestFileDataFormat -from .deidentify_file_request_file import DeidentifyFileRequestFile -from .deidentify_file_request_file_data_format import DeidentifyFileRequestFileDataFormat -from .deidentify_image_request_file import DeidentifyImageRequestFile -from .deidentify_image_request_file_data_format import DeidentifyImageRequestFileDataFormat -from .deidentify_image_request_masking_method import DeidentifyImageRequestMaskingMethod -from .deidentify_pdf_request_file import DeidentifyPdfRequestFile -from .deidentify_presentation_request_file import DeidentifyPresentationRequestFile -from .deidentify_presentation_request_file_data_format import DeidentifyPresentationRequestFileDataFormat -from .deidentify_spreadsheet_request_file import DeidentifySpreadsheetRequestFile -from .deidentify_spreadsheet_request_file_data_format import DeidentifySpreadsheetRequestFileDataFormat -from .deidentify_structured_text_request_file import DeidentifyStructuredTextRequestFile -from .deidentify_structured_text_request_file_data_format import DeidentifyStructuredTextRequestFileDataFormat -from .deidentify_text_request_file import DeidentifyTextRequestFile -from .reidentify_file_request_file import ReidentifyFileRequestFile -from .reidentify_file_request_file_data_format import ReidentifyFileRequestFileDataFormat -from .reidentify_file_request_format import ReidentifyFileRequestFormat +from .deidentify_file_audio_request_deidentify_audio_entity_types_item import ( + DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem, +) +from .deidentify_file_audio_request_deidentify_audio_output_transcription import ( + DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription, +) +from .deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item import ( + DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem, +) +from .deidentify_file_image_request_deidentify_image_entity_types_item import ( + 
DeidentifyFileImageRequestDeidentifyImageEntityTypesItem, +) +from .deidentify_file_image_request_deidentify_image_masking_method import ( + DeidentifyFileImageRequestDeidentifyImageMaskingMethod, +) +from .deidentify_file_request_deidentify_document_entity_types_item import ( + DeidentifyFileRequestDeidentifyDocumentEntityTypesItem, +) +from .deidentify_file_request_deidentify_presentation_entity_types_item import ( + DeidentifyFileRequestDeidentifyPresentationEntityTypesItem, +) +from .deidentify_file_request_deidentify_spreadsheet_entity_types_item import ( + DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem, +) +from .deidentify_file_request_deidentify_structured_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem, +) +from .deidentify_file_request_deidentify_text_entity_types_item import ( + DeidentifyFileRequestDeidentifyTextEntityTypesItem, +) +from .deidentify_file_request_entity_types_item import DeidentifyFileRequestEntityTypesItem __all__ = [ - "DeidentifyAudioRequestFile", - "DeidentifyAudioRequestFileDataFormat", - "DeidentifyAudioRequestOutputTranscription", - "DeidentifyDocumentRequestFile", - "DeidentifyDocumentRequestFileDataFormat", - "DeidentifyFileRequestFile", - "DeidentifyFileRequestFileDataFormat", - "DeidentifyImageRequestFile", - "DeidentifyImageRequestFileDataFormat", - "DeidentifyImageRequestMaskingMethod", - "DeidentifyPdfRequestFile", - "DeidentifyPresentationRequestFile", - "DeidentifyPresentationRequestFileDataFormat", - "DeidentifySpreadsheetRequestFile", - "DeidentifySpreadsheetRequestFileDataFormat", - "DeidentifyStructuredTextRequestFile", - "DeidentifyStructuredTextRequestFileDataFormat", - "DeidentifyTextRequestFile", - "ReidentifyFileRequestFile", - "ReidentifyFileRequestFileDataFormat", - "ReidentifyFileRequestFormat", + "DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem", + "DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription", + "DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageEntityTypesItem", + "DeidentifyFileImageRequestDeidentifyImageMaskingMethod", + "DeidentifyFileRequestDeidentifyDocumentEntityTypesItem", + "DeidentifyFileRequestDeidentifyPresentationEntityTypesItem", + "DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem", + "DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem", + "DeidentifyFileRequestDeidentifyTextEntityTypesItem", + "DeidentifyFileRequestEntityTypesItem", ] diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py deleted file mode 100644 index 85f60bbb..00000000 --- a/skyflow/generated/rest/files/types/deidentify_audio_request_file_data_format.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyAudioRequestFileDataFormat = typing.Union[typing.Literal["mp3", "wav"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py b/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py deleted file mode 100644 index 4588b1d1..00000000 --- a/skyflow/generated/rest/files/types/deidentify_audio_request_output_transcription.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -DeidentifyAudioRequestOutputTranscription = typing.Union[ - typing.Literal[ - "diarized_transcription", - "medical_diarized_transcription", - "medical_transcription", - "plaintext_transcription", - "transcription", - ], - typing.Any, -] diff --git a/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py deleted file mode 100644 index a20f4fd8..00000000 --- a/skyflow/generated/rest/files/types/deidentify_document_request_file_data_format.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyDocumentRequestFileDataFormat = typing.Union[typing.Literal["doc", "docx", "pdf"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_entity_types_item.py new file mode 100644 index 00000000..b22a177a --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileAudioRequestDeidentifyAudioEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_output_transcription.py b/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_output_transcription.py new file mode 100644 index 00000000..957cee86 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_audio_request_deidentify_audio_output_transcription.py @@ -0,0 +1,10 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyFileAudioRequestDeidentifyAudioOutputTranscription = typing.Union[ + typing.Literal[ + "transcription", "medical_transcription", "diarized_transcription", "medical_diarized_transcription" + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item.py new file mode 100644 index 00000000..d8469e74 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_document_pdf_request_deidentify_pdf_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileDocumentPdfRequestDeidentifyPdfEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_entity_types_item.py new file mode 100644 index 00000000..4597dc0f --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyFileImageRequestDeidentifyImageEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_masking_method.py b/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_masking_method.py new file mode 100644 index 00000000..c357821e --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_image_request_deidentify_image_masking_method.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileImageRequestDeidentifyImageMaskingMethod = typing.Union[typing.Literal["blur", "blackbox"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_document_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_document_entity_types_item.py new file mode 100644 index 00000000..afcbe2ee --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_document_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyFileRequestDeidentifyDocumentEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_presentation_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_presentation_entity_types_item.py new file mode 100644 index 00000000..69d40627 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_presentation_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileRequestDeidentifyPresentationEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_spreadsheet_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_spreadsheet_entity_types_item.py new file mode 100644 index 00000000..0bb37078 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_spreadsheet_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyFileRequestDeidentifySpreadsheetEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_structured_text_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_structured_text_entity_types_item.py new file mode 100644 index 00000000..a487d969 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_structured_text_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileRequestDeidentifyStructuredTextEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_text_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_text_entity_types_item.py new file mode 100644 index 00000000..2e6118cd --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_deidentify_text_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifyFileRequestDeidentifyTextEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_entity_types_item.py b/skyflow/generated/rest/files/types/deidentify_file_request_entity_types_item.py new file mode 100644 index 00000000..6d7e8d38 --- /dev/null +++ b/skyflow/generated/rest/files/types/deidentify_file_request_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyFileRequestEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py deleted file mode 100644 index a2ca8f2a..00000000 --- a/skyflow/generated/rest/files/types/deidentify_image_request_file_data_format.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -DeidentifyImageRequestFileDataFormat = typing.Union[ - typing.Literal["bmp", "jpeg", "jpg", "png", "tif", "tiff"], typing.Any -] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py b/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py deleted file mode 100644 index bc0c338c..00000000 --- a/skyflow/generated/rest/files/types/deidentify_image_request_masking_method.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyImageRequestMaskingMethod = typing.Union[typing.Literal["blackbox", "blur"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py b/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py deleted file mode 100644 index c618ccc1..00000000 --- a/skyflow/generated/rest/files/types/deidentify_presentation_request_file.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_presentation_request_file_data_format import DeidentifyPresentationRequestFileDataFormat - - -class DeidentifyPresentationRequestFile(UniversalBaseModel): - """ - File to de-identify. Files are specified as Base64-encoded data. - """ - - base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() - """ - Base64-encoded data of the file to de-identify. - """ - - data_format: DeidentifyPresentationRequestFileDataFormat = pydantic.Field() - """ - Data format of the file. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py deleted file mode 100644 index d09f42f8..00000000 --- a/skyflow/generated/rest/files/types/deidentify_presentation_request_file_data_format.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyPresentationRequestFileDataFormat = typing.Union[typing.Literal["ppt", "pptx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py deleted file mode 100644 index 20db2856..00000000 --- a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file_data_format.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifySpreadsheetRequestFileDataFormat = typing.Union[typing.Literal["csv", "xls", "xlsx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py deleted file mode 100644 index aa2d0834..00000000 --- a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -import pydantic -import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_structured_text_request_file_data_format import DeidentifyStructuredTextRequestFileDataFormat - - -class DeidentifyStructuredTextRequestFile(UniversalBaseModel): - """ - File to de-identify. Files are specified as Base64-encoded data. - """ - - base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() - """ - Base64-encoded data of the file to de-identify. - """ - - data_format: DeidentifyStructuredTextRequestFileDataFormat = pydantic.Field() - """ - Data format of the file. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py b/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py deleted file mode 100644 index f956fe48..00000000 --- a/skyflow/generated/rest/files/types/deidentify_structured_text_request_file_data_format.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyStructuredTextRequestFileDataFormat = typing.Union[typing.Literal["json", "xml"], typing.Any] diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_file.py b/skyflow/generated/rest/files/types/reidentify_file_request_file.py deleted file mode 100644 index 429f22ee..00000000 --- a/skyflow/generated/rest/files/types/reidentify_file_request_file.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .reidentify_file_request_file_data_format import ReidentifyFileRequestFileDataFormat - - -class ReidentifyFileRequestFile(UniversalBaseModel): - """ - File to re-identify. Files are specified as Base64-encoded data or an EFS path. - """ - - base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() - """ - Base64-encoded data of the file to re-identify. - """ - - data_format: ReidentifyFileRequestFileDataFormat = pydantic.Field() - """ - Data format of the file. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py b/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py deleted file mode 100644 index 5aca9bb6..00000000 --- a/skyflow/generated/rest/files/types/reidentify_file_request_file_data_format.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -ReidentifyFileRequestFileDataFormat = typing.Union[ - typing.Literal["csv", "doc", "docx", "json", "txt", "xls", "xlsx", "xml"], typing.Any -] diff --git a/skyflow/generated/rest/guardrails/client.py b/skyflow/generated/rest/guardrails/client.py index e7fe1e05..88cab59d 100644 --- a/skyflow/generated/rest/guardrails/client.py +++ b/skyflow/generated/rest/guardrails/client.py @@ -4,8 +4,7 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.check_guardrails_response import CheckGuardrailsResponse -from ..types.vault_id import VaultId +from ..types.detect_guardrails_response import DetectGuardrailsResponse from .raw_client import AsyncRawGuardrailsClient, RawGuardrailsClient # this is used as the default value for optional parameters @@ -30,24 +29,25 @@ def with_raw_response(self) -> RawGuardrailsClient: def check_guardrails( self, *, - vault_id: VaultId, text: str, + vault_id: str, check_toxicity: typing.Optional[bool] = OMIT, deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> CheckGuardrailsResponse: + ) -> DetectGuardrailsResponse: """ Preserve safety and compliance with usage policies. Parameters ---------- - vault_id : VaultId - text : str Text to check against guardrails. + vault_id : str + ID of the vault. + check_toxicity : typing.Optional[bool] - Check for toxicity in the text. + If `true`, checks for toxicity in the text. deny_topics : typing.Optional[typing.Sequence[str]] List of topics to deny. @@ -57,8 +57,8 @@ def check_guardrails( Returns ------- - CheckGuardrailsResponse - A successful response. + DetectGuardrailsResponse + OK Examples -------- @@ -68,13 +68,15 @@ def check_guardrails( token="YOUR_TOKEN", ) client.guardrails.check_guardrails( - vault_id="vault_id", - text="text", + text="I love to play cricket.", + check_toxicity=True, + deny_topics=["sports"], + vault_id="$VAULT_ID", ) """ _response = self._raw_client.check_guardrails( - vault_id=vault_id, text=text, + vault_id=vault_id, check_toxicity=check_toxicity, deny_topics=deny_topics, request_options=request_options, @@ -100,24 +102,25 @@ def with_raw_response(self) -> AsyncRawGuardrailsClient: async def check_guardrails( self, *, - vault_id: VaultId, text: str, + vault_id: str, check_toxicity: typing.Optional[bool] = OMIT, deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> CheckGuardrailsResponse: + ) -> DetectGuardrailsResponse: """ Preserve safety and compliance with usage policies. Parameters ---------- - vault_id : VaultId - text : str Text to check against guardrails. + vault_id : str + ID of the vault. + check_toxicity : typing.Optional[bool] - Check for toxicity in the text. + If `true`, checks for toxicity in the text. deny_topics : typing.Optional[typing.Sequence[str]] List of topics to deny. @@ -127,8 +130,8 @@ async def check_guardrails( Returns ------- - CheckGuardrailsResponse - A successful response. 
+ DetectGuardrailsResponse + OK Examples -------- @@ -143,16 +146,18 @@ async def check_guardrails( async def main() -> None: await client.guardrails.check_guardrails( - vault_id="vault_id", - text="text", + text="I love to play cricket.", + check_toxicity=True, + deny_topics=["sports"], + vault_id="$VAULT_ID", ) asyncio.run(main()) """ _response = await self._raw_client.check_guardrails( - vault_id=vault_id, text=text, + vault_id=vault_id, check_toxicity=check_toxicity, deny_topics=deny_topics, request_options=request_options, diff --git a/skyflow/generated/rest/guardrails/raw_client.py b/skyflow/generated/rest/guardrails/raw_client.py index 11030fd3..1328b1b8 100644 --- a/skyflow/generated/rest/guardrails/raw_client.py +++ b/skyflow/generated/rest/guardrails/raw_client.py @@ -11,9 +11,8 @@ from ..errors.bad_request_error import BadRequestError from ..errors.internal_server_error import InternalServerError from ..errors.unauthorized_error import UnauthorizedError -from ..types.check_guardrails_response import CheckGuardrailsResponse +from ..types.detect_guardrails_response import DetectGuardrailsResponse from ..types.error_response import ErrorResponse -from ..types.vault_id import VaultId # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -26,24 +25,25 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def check_guardrails( self, *, - vault_id: VaultId, text: str, + vault_id: str, check_toxicity: typing.Optional[bool] = OMIT, deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[CheckGuardrailsResponse]: + ) -> HttpResponse[DetectGuardrailsResponse]: """ Preserve safety and compliance with usage policies. Parameters ---------- - vault_id : VaultId - text : str Text to check against guardrails. + vault_id : str + ID of the vault. + check_toxicity : typing.Optional[bool] - Check for toxicity in the text. + If `true`, checks for toxicity in the text. deny_topics : typing.Optional[typing.Sequence[str]] List of topics to deny. @@ -53,17 +53,17 @@ def check_guardrails( Returns ------- - HttpResponse[CheckGuardrailsResponse] - A successful response. + HttpResponse[DetectGuardrailsResponse] + OK """ _response = self._client_wrapper.httpx_client.request( "v1/detect/guardrails", method="POST", json={ - "vault_id": vault_id, "text": text, "check_toxicity": check_toxicity, "deny_topics": deny_topics, + "vault_id": vault_id, }, headers={ "content-type": "application/json", @@ -74,9 +74,9 @@ def check_guardrails( try: if 200 <= _response.status_code < 300: _data = typing.cast( - CheckGuardrailsResponse, + DetectGuardrailsResponse, parse_obj_as( - type_=CheckGuardrailsResponse, # type: ignore + type_=DetectGuardrailsResponse, # type: ignore object_=_response.json(), ), ) @@ -127,24 +127,25 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def check_guardrails( self, *, - vault_id: VaultId, text: str, + vault_id: str, check_toxicity: typing.Optional[bool] = OMIT, deny_topics: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[CheckGuardrailsResponse]: + ) -> AsyncHttpResponse[DetectGuardrailsResponse]: """ Preserve safety and compliance with usage policies. Parameters ---------- - vault_id : VaultId - text : str Text to check against guardrails. + vault_id : str + ID of the vault. + check_toxicity : typing.Optional[bool] - Check for toxicity in the text. 
+ If `true`, checks for toxicity in the text. deny_topics : typing.Optional[typing.Sequence[str]] List of topics to deny. @@ -154,17 +155,17 @@ async def check_guardrails( Returns ------- - AsyncHttpResponse[CheckGuardrailsResponse] - A successful response. + AsyncHttpResponse[DetectGuardrailsResponse] + OK """ _response = await self._client_wrapper.httpx_client.request( "v1/detect/guardrails", method="POST", json={ - "vault_id": vault_id, "text": text, "check_toxicity": check_toxicity, "deny_topics": deny_topics, + "vault_id": vault_id, }, headers={ "content-type": "application/json", @@ -175,9 +176,9 @@ async def check_guardrails( try: if 200 <= _response.status_code < 300: _data = typing.cast( - CheckGuardrailsResponse, + DetectGuardrailsResponse, parse_obj_as( - type_=CheckGuardrailsResponse, # type: ignore + type_=DetectGuardrailsResponse, # type: ignore object_=_response.json(), ), ) diff --git a/skyflow/generated/rest/records/client.py b/skyflow/generated/rest/records/client.py index cfe15a1c..1b5ddeb7 100644 --- a/skyflow/generated/rest/records/client.py +++ b/skyflow/generated/rest/records/client.py @@ -192,6 +192,13 @@ def record_service_bulk_get_record( client.records.record_service_bulk_get_record( vault_id="vaultID", object_name="objectName", + redaction="DEFAULT", + tokenization=True, + offset="offset", + limit="limit", + download_url=True, + column_name="column_name", + order_by="ASCENDING", ) """ _response = self._raw_client.record_service_bulk_get_record( @@ -409,6 +416,9 @@ def record_service_get_record( vault_id="vaultID", object_name="objectName", id="ID", + redaction="DEFAULT", + tokenization=True, + download_url=True, ) """ _response = self._raw_client.record_service_get_record( @@ -947,6 +957,13 @@ async def main() -> None: await client.records.record_service_bulk_get_record( vault_id="vaultID", object_name="objectName", + redaction="DEFAULT", + tokenization=True, + offset="offset", + limit="limit", + download_url=True, + column_name="column_name", + order_by="ASCENDING", ) @@ -1188,6 +1205,9 @@ async def main() -> None: vault_id="vaultID", object_name="objectName", id="ID", + redaction="DEFAULT", + tokenization=True, + download_url=True, ) diff --git a/skyflow/generated/rest/strings/__init__.py b/skyflow/generated/rest/strings/__init__.py index 4cabb7fb..50bdd77e 100644 --- a/skyflow/generated/rest/strings/__init__.py +++ b/skyflow/generated/rest/strings/__init__.py @@ -2,6 +2,6 @@ # isort: skip_file -from .types import ReidentifyStringRequestFormat +from .types import DeidentifyStringRequestEntityTypesItem -__all__ = ["ReidentifyStringRequestFormat"] +__all__ = ["DeidentifyStringRequestEntityTypesItem"] diff --git a/skyflow/generated/rest/strings/client.py b/skyflow/generated/rest/strings/client.py index 14b2266d..88d8c2c2 100644 --- a/skyflow/generated/rest/strings/client.py +++ b/skyflow/generated/rest/strings/client.py @@ -4,17 +4,13 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.allow_regex import AllowRegex -from ..types.configuration_id import ConfigurationId from ..types.deidentify_string_response import DeidentifyStringResponse -from ..types.entity_types import EntityTypes -from ..types.reidentify_string_response import ReidentifyStringResponse -from ..types.restrict_regex import RestrictRegex -from ..types.token_type import TokenType +from ..types.format import Format +from ..types.identify_response import IdentifyResponse +from ..types.token_type_mapping import 
TokenTypeMapping from ..types.transformations import Transformations -from ..types.vault_id import VaultId from .raw_client import AsyncRawStringsClient, RawStringsClient -from .types.reidentify_string_request_format import ReidentifyStringRequestFormat +from .types.deidentify_string_request_entity_types_item import DeidentifyStringRequestEntityTypesItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -38,14 +34,14 @@ def with_raw_response(self) -> RawStringsClient: def deidentify_string( self, *, - vault_id: VaultId, text: str, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenType] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyStringResponse: """ @@ -53,30 +49,35 @@ def deidentify_string( Parameters ---------- - vault_id : VaultId - text : str - String to de-identify. + Text to de-identify. - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenType] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyStringResponse - A successful response. 
+ OK Examples -------- @@ -86,19 +87,19 @@ def deidentify_string( token="YOUR_TOKEN", ) client.strings.deidentify_string( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - text="My name is John Doe, and my email is johndoe@acme.com.", + text="text", + vault_id="f4b3b3b3-3b3b-3b3b-3b3b-3b3b3b3b3b3b", ) """ _response = self._raw_client.deidentify_string( - vault_id=vault_id, text=text, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data @@ -106,32 +107,31 @@ def deidentify_string( def reidentify_string( self, *, - text: str, - vault_id: str, - format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + text: typing.Optional[str] = OMIT, + vault_id: typing.Optional[str] = OMIT, + format: typing.Optional[Format] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> ReidentifyStringResponse: + ) -> IdentifyResponse: """ Re-identifies tokens in a string. Parameters ---------- - text : str - String to re-identify. + text : typing.Optional[str] + Text to reidentify. - vault_id : str + vault_id : typing.Optional[str] ID of the vault where the entities are stored. - format : typing.Optional[ReidentifyStringRequestFormat] - Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - ReidentifyStringResponse - A successful response. + IdentifyResponse + OK Examples -------- @@ -140,10 +140,7 @@ def reidentify_string( client = Skyflow( token="YOUR_TOKEN", ) - client.strings.reidentify_string( - text="My name is [NAME_1], and my email is [EMAIL_1].", - vault_id="1ad6db07-8405-46cf-9a1e-db148ff9f4c5", - ) + client.strings.reidentify_string() """ _response = self._raw_client.reidentify_string( text=text, vault_id=vault_id, format=format, request_options=request_options @@ -169,14 +166,14 @@ def with_raw_response(self) -> AsyncRawStringsClient: async def deidentify_string( self, *, - vault_id: VaultId, text: str, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenType] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> DeidentifyStringResponse: """ @@ -184,30 +181,35 @@ async def deidentify_string( Parameters ---------- - vault_id : VaultId - text : str - String to de-identify. + Text to de-identify. - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. 
- entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenType] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- DeidentifyStringResponse - A successful response. + OK Examples -------- @@ -222,22 +224,22 @@ async def deidentify_string( async def main() -> None: await client.strings.deidentify_string( - vault_id="f4b3b3b33b3b3b3b3b3b3b3b3b3b3b3b", - text="My name is John Doe, and my email is johndoe@acme.com.", + text="text", + vault_id="f4b3b3b3-3b3b-3b3b-3b3b-3b3b3b3b3b3b", ) asyncio.run(main()) """ _response = await self._raw_client.deidentify_string( - vault_id=vault_id, text=text, - configuration_id=configuration_id, + vault_id=vault_id, entity_types=entity_types, token_type=token_type, allow_regex=allow_regex, restrict_regex=restrict_regex, transformations=transformations, + configuration_id=configuration_id, request_options=request_options, ) return _response.data @@ -245,32 +247,31 @@ async def main() -> None: async def reidentify_string( self, *, - text: str, - vault_id: str, - format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + text: typing.Optional[str] = OMIT, + vault_id: typing.Optional[str] = OMIT, + format: typing.Optional[Format] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> ReidentifyStringResponse: + ) -> IdentifyResponse: """ Re-identifies tokens in a string. Parameters ---------- - text : str - String to re-identify. + text : typing.Optional[str] + Text to reidentify. - vault_id : str + vault_id : typing.Optional[str] ID of the vault where the entities are stored. - format : typing.Optional[ReidentifyStringRequestFormat] - Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. 
+ format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - ReidentifyStringResponse - A successful response. + IdentifyResponse + OK Examples -------- @@ -284,10 +285,7 @@ async def reidentify_string( async def main() -> None: - await client.strings.reidentify_string( - text="My name is [NAME_1], and my email is [EMAIL_1].", - vault_id="1ad6db07-8405-46cf-9a1e-db148ff9f4c5", - ) + await client.strings.reidentify_string() asyncio.run(main()) diff --git a/skyflow/generated/rest/strings/raw_client.py b/skyflow/generated/rest/strings/raw_client.py index 3ae9bf41..313c10ce 100644 --- a/skyflow/generated/rest/strings/raw_client.py +++ b/skyflow/generated/rest/strings/raw_client.py @@ -12,17 +12,13 @@ from ..errors.bad_request_error import BadRequestError from ..errors.internal_server_error import InternalServerError from ..errors.unauthorized_error import UnauthorizedError -from ..types.allow_regex import AllowRegex -from ..types.configuration_id import ConfigurationId from ..types.deidentify_string_response import DeidentifyStringResponse -from ..types.entity_types import EntityTypes from ..types.error_response import ErrorResponse -from ..types.reidentify_string_response import ReidentifyStringResponse -from ..types.restrict_regex import RestrictRegex -from ..types.token_type import TokenType +from ..types.format import Format +from ..types.identify_response import IdentifyResponse +from ..types.token_type_mapping import TokenTypeMapping from ..types.transformations import Transformations -from ..types.vault_id import VaultId -from .types.reidentify_string_request_format import ReidentifyStringRequestFormat +from .types.deidentify_string_request_entity_types_item import DeidentifyStringRequestEntityTypesItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -35,14 +31,14 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def deidentify_string( self, *, - vault_id: VaultId, text: str, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenType] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> HttpResponse[DeidentifyStringResponse]: """ @@ -50,47 +46,52 @@ def deidentify_string( Parameters ---------- - vault_id : VaultId - text : str - String to de-identify. + Text to de-identify. - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenType] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. 
Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- HttpResponse[DeidentifyStringResponse] - A successful response. + OK """ _response = self._client_wrapper.httpx_client.request( "v1/detect/deidentify/string", method="POST", json={ - "vault_id": vault_id, "text": text, - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenType, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -149,32 +150,31 @@ def deidentify_string( def reidentify_string( self, *, - text: str, - vault_id: str, - format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + text: typing.Optional[str] = OMIT, + vault_id: typing.Optional[str] = OMIT, + format: typing.Optional[Format] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ReidentifyStringResponse]: + ) -> HttpResponse[IdentifyResponse]: """ Re-identifies tokens in a string. Parameters ---------- - text : str - String to re-identify. + text : typing.Optional[str] + Text to reidentify. - vault_id : str + vault_id : typing.Optional[str] ID of the vault where the entities are stored. - format : typing.Optional[ReidentifyStringRequestFormat] - Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - HttpResponse[ReidentifyStringResponse] - A successful response. 
+ HttpResponse[IdentifyResponse] + OK """ _response = self._client_wrapper.httpx_client.request( "v1/detect/reidentify/string", @@ -182,9 +182,7 @@ def reidentify_string( json={ "text": text, "vault_id": vault_id, - "format": convert_and_respect_annotation_metadata( - object_=format, annotation=ReidentifyStringRequestFormat, direction="write" - ), + "format": convert_and_respect_annotation_metadata(object_=format, annotation=Format, direction="write"), }, headers={ "content-type": "application/json", @@ -195,9 +193,9 @@ def reidentify_string( try: if 200 <= _response.status_code < 300: _data = typing.cast( - ReidentifyStringResponse, + IdentifyResponse, parse_obj_as( - type_=ReidentifyStringResponse, # type: ignore + type_=IdentifyResponse, # type: ignore object_=_response.json(), ), ) @@ -248,14 +246,14 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def deidentify_string( self, *, - vault_id: VaultId, text: str, - configuration_id: typing.Optional[ConfigurationId] = OMIT, - entity_types: typing.Optional[EntityTypes] = OMIT, - token_type: typing.Optional[TokenType] = OMIT, - allow_regex: typing.Optional[AllowRegex] = OMIT, - restrict_regex: typing.Optional[RestrictRegex] = OMIT, + vault_id: str, + entity_types: typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] = OMIT, + token_type: typing.Optional[TokenTypeMapping] = OMIT, + allow_regex: typing.Optional[typing.Sequence[str]] = OMIT, + restrict_regex: typing.Optional[typing.Sequence[str]] = OMIT, transformations: typing.Optional[Transformations] = OMIT, + configuration_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> AsyncHttpResponse[DeidentifyStringResponse]: """ @@ -263,47 +261,52 @@ async def deidentify_string( Parameters ---------- - vault_id : VaultId - text : str - String to de-identify. + Text to de-identify. - configuration_id : typing.Optional[ConfigurationId] + vault_id : str + ID of a vault that you have Detect Invoker or Vault Owner permissions for. - entity_types : typing.Optional[EntityTypes] + entity_types : typing.Optional[typing.Sequence[DeidentifyStringRequestEntityTypesItem]] + Entities to detect and de-identify. - token_type : typing.Optional[TokenType] + token_type : typing.Optional[TokenTypeMapping] - allow_regex : typing.Optional[AllowRegex] + allow_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to display in plaintext. Entities appear in plaintext if an expression matches either the entirety of a detected entity or a substring of it. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. - restrict_regex : typing.Optional[RestrictRegex] + restrict_regex : typing.Optional[typing.Sequence[str]] + Regular expressions to replace with '[RESTRICTED]'. Expressions must match the entirety of a detected entity, not just a substring, for the entity to be restricted. Expressions don't match across entity boundaries. If a string or entity matches both `allow_regex` and `restrict_regex`, the entity is displayed in plaintext. If a string is detected as an entity and a `restrict_regex` pattern matches the entire detected entity, the entity is replaced with '[RESTRICTED]'. If a string is detected as an entity but a `restrict_regex` pattern only matches a substring of it, the `restrict_regex` pattern is ignored, and the entity is processed according to the specified tokenization and transformation settings. 
transformations : typing.Optional[Transformations] + configuration_id : typing.Optional[str] + ID of the Detect configuration to use for de-identification. Can't be specified with fields other than `vault_id`, `text`, and `file`. + request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- AsyncHttpResponse[DeidentifyStringResponse] - A successful response. + OK """ _response = await self._client_wrapper.httpx_client.request( "v1/detect/deidentify/string", method="POST", json={ - "vault_id": vault_id, "text": text, - "configuration_id": configuration_id, + "vault_id": vault_id, "entity_types": entity_types, "token_type": convert_and_respect_annotation_metadata( - object_=token_type, annotation=TokenType, direction="write" + object_=token_type, annotation=TokenTypeMapping, direction="write" ), "allow_regex": allow_regex, "restrict_regex": restrict_regex, "transformations": convert_and_respect_annotation_metadata( object_=transformations, annotation=Transformations, direction="write" ), + "configuration_id": configuration_id, }, headers={ "content-type": "application/json", @@ -362,32 +365,31 @@ async def deidentify_string( async def reidentify_string( self, *, - text: str, - vault_id: str, - format: typing.Optional[ReidentifyStringRequestFormat] = OMIT, + text: typing.Optional[str] = OMIT, + vault_id: typing.Optional[str] = OMIT, + format: typing.Optional[Format] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ReidentifyStringResponse]: + ) -> AsyncHttpResponse[IdentifyResponse]: """ Re-identifies tokens in a string. Parameters ---------- - text : str - String to re-identify. + text : typing.Optional[str] + Text to reidentify. - vault_id : str + vault_id : typing.Optional[str] ID of the vault where the entities are stored. - format : typing.Optional[ReidentifyStringRequestFormat] - Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. + format : typing.Optional[Format] request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - AsyncHttpResponse[ReidentifyStringResponse] - A successful response. 
+ AsyncHttpResponse[IdentifyResponse] + OK """ _response = await self._client_wrapper.httpx_client.request( "v1/detect/reidentify/string", @@ -395,9 +397,7 @@ async def reidentify_string( json={ "text": text, "vault_id": vault_id, - "format": convert_and_respect_annotation_metadata( - object_=format, annotation=ReidentifyStringRequestFormat, direction="write" - ), + "format": convert_and_respect_annotation_metadata(object_=format, annotation=Format, direction="write"), }, headers={ "content-type": "application/json", @@ -408,9 +408,9 @@ async def reidentify_string( try: if 200 <= _response.status_code < 300: _data = typing.cast( - ReidentifyStringResponse, + IdentifyResponse, parse_obj_as( - type_=ReidentifyStringResponse, # type: ignore + type_=IdentifyResponse, # type: ignore object_=_response.json(), ), ) diff --git a/skyflow/generated/rest/strings/types/__init__.py b/skyflow/generated/rest/strings/types/__init__.py index 97d06583..592fae37 100644 --- a/skyflow/generated/rest/strings/types/__init__.py +++ b/skyflow/generated/rest/strings/types/__init__.py @@ -2,6 +2,6 @@ # isort: skip_file -from .reidentify_string_request_format import ReidentifyStringRequestFormat +from .deidentify_string_request_entity_types_item import DeidentifyStringRequestEntityTypesItem -__all__ = ["ReidentifyStringRequestFormat"] +__all__ = ["DeidentifyStringRequestEntityTypesItem"] diff --git a/skyflow/generated/rest/strings/types/deidentify_string_request_entity_types_item.py b/skyflow/generated/rest/strings/types/deidentify_string_request_entity_types_item.py new file mode 100644 index 00000000..bde5d084 --- /dev/null +++ b/skyflow/generated/rest/strings/types/deidentify_string_request_entity_types_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DeidentifyStringRequestEntityTypesItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/strings/types/reidentify_string_request_format.py b/skyflow/generated/rest/strings/types/reidentify_string_request_format.py deleted file mode 100644 index bfda392c..00000000 --- a/skyflow/generated/rest/strings/types/reidentify_string_request_format.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...types.entity_type import EntityType - - -class ReidentifyStringRequestFormat(UniversalBaseModel): - """ - Mapping of perferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. - """ - - redacted: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to fully redact. - """ - - masked: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to mask. - """ - - plaintext: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to return in plaintext. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index aa9b4a35..75979081 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -2,49 +2,69 @@ # isort: skip_file -from .allow_regex import AllowRegex from .audit_event_audit_resource_type import AuditEventAuditResourceType from .audit_event_context import AuditEventContext from .audit_event_data import AuditEventData from .audit_event_http_info import AuditEventHttpInfo from .batch_record_method import BatchRecordMethod -from .check_guardrails_response import CheckGuardrailsResponse -from .check_guardrails_response_validation import CheckGuardrailsResponseValidation -from .configuration_id import ConfigurationId from .context_access_type import ContextAccessType from .context_auth_mode import ContextAuthMode -from .deidentify_file_output import DeidentifyFileOutput -from .deidentify_file_output_processed_file_type import DeidentifyFileOutputProcessedFileType +from .deidentified_file_output import DeidentifiedFileOutput +from .deidentified_file_output_processed_file_extension import DeidentifiedFileOutputProcessedFileExtension +from .deidentified_file_output_processed_file_type import DeidentifiedFileOutputProcessedFileType from .deidentify_file_response import DeidentifyFileResponse -from .deidentify_status_response import DeidentifyStatusResponse -from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType -from .deidentify_status_response_status import DeidentifyStatusResponseStatus from .deidentify_string_response import DeidentifyStringResponse -from .detected_entity import DetectedEntity +from .detect_guardrails_response import DetectGuardrailsResponse +from .detect_guardrails_response_validation import DetectGuardrailsResponseValidation +from .detect_runs_response import DetectRunsResponse +from .detect_runs_response_output_type import DetectRunsResponseOutputType +from .detect_runs_response_status import DetectRunsResponseStatus from .detokenize_record_response_value_type import DetokenizeRecordResponseValueType -from .entity_location import EntityLocation -from .entity_type import EntityType -from .entity_types import EntityTypes from .error_response import ErrorResponse from .error_response_error import ErrorResponseError -from .error_string import ErrorString +from .file_data import FileData +from .file_data_data_format import FileDataDataFormat +from 
.file_data_deidentify_audio import FileDataDeidentifyAudio +from .file_data_deidentify_audio_data_format import FileDataDeidentifyAudioDataFormat +from .file_data_deidentify_document import FileDataDeidentifyDocument +from .file_data_deidentify_document_data_format import FileDataDeidentifyDocumentDataFormat +from .file_data_deidentify_image import FileDataDeidentifyImage +from .file_data_deidentify_image_data_format import FileDataDeidentifyImageDataFormat +from .file_data_deidentify_pdf import FileDataDeidentifyPdf +from .file_data_deidentify_presentation import FileDataDeidentifyPresentation +from .file_data_deidentify_presentation_data_format import FileDataDeidentifyPresentationDataFormat +from .file_data_deidentify_spreadsheet import FileDataDeidentifySpreadsheet +from .file_data_deidentify_spreadsheet_data_format import FileDataDeidentifySpreadsheetDataFormat +from .file_data_deidentify_structured_text import FileDataDeidentifyStructuredText +from .file_data_deidentify_structured_text_data_format import FileDataDeidentifyStructuredTextDataFormat +from .file_data_deidentify_text import FileDataDeidentifyText +from .file_data_reidentify_file import FileDataReidentifyFile +from .file_data_reidentify_file_data_format import FileDataReidentifyFileDataFormat +from .format import Format +from .format_masked_item import FormatMaskedItem +from .format_plaintext_item import FormatPlaintextItem +from .format_redacted_item import FormatRedactedItem from .googlerpc_status import GooglerpcStatus +from .http_code import HttpCode +from .identify_response import IdentifyResponse from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction +from .reidentified_file_output import ReidentifiedFileOutput +from .reidentified_file_output_processed_file_extension import ReidentifiedFileOutputProcessedFileExtension from .reidentify_file_response import ReidentifyFileResponse -from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus -from .reidentify_string_response import ReidentifyStringResponse from .request_action_type import RequestActionType from .resource_id import ResourceId -from .restrict_regex import RestrictRegex -from .token_type import TokenType -from .token_type_default import TokenTypeDefault -from .token_type_without_vault import TokenTypeWithoutVault -from .token_type_without_vault_default import TokenTypeWithoutVaultDefault +from .shift_dates import ShiftDates +from .shift_dates_entity_types_item import ShiftDatesEntityTypesItem +from .string_response_entities import StringResponseEntities +from .token_type_mapping import TokenTypeMapping +from .token_type_mapping_default import TokenTypeMappingDefault +from .token_type_mapping_entity_only_item import TokenTypeMappingEntityOnlyItem +from .token_type_mapping_entity_unq_counter_item import TokenTypeMappingEntityUnqCounterItem +from .token_type_mapping_vault_token_item import TokenTypeMappingVaultTokenItem from .transformations import Transformations -from .transformations_shift_dates import TransformationsShiftDates -from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem from .upload_file_v_2_response import UploadFileV2Response from .uuid_ import Uuid from .v_1_audit_after_options import V1AuditAfterOptions @@ -78,52 +98,72 @@ from .v_1_update_record_response import V1UpdateRecordResponse 
from .v_1_vault_field_mapping import V1VaultFieldMapping from .v_1_vault_schema_config import V1VaultSchemaConfig -from .vault_id import VaultId +from .word_character_count import WordCharacterCount __all__ = [ - "AllowRegex", "AuditEventAuditResourceType", "AuditEventContext", "AuditEventData", "AuditEventHttpInfo", "BatchRecordMethod", - "CheckGuardrailsResponse", - "CheckGuardrailsResponseValidation", - "ConfigurationId", "ContextAccessType", "ContextAuthMode", - "DeidentifyFileOutput", - "DeidentifyFileOutputProcessedFileType", + "DeidentifiedFileOutput", + "DeidentifiedFileOutputProcessedFileExtension", + "DeidentifiedFileOutputProcessedFileType", "DeidentifyFileResponse", - "DeidentifyStatusResponse", - "DeidentifyStatusResponseOutputType", - "DeidentifyStatusResponseStatus", "DeidentifyStringResponse", - "DetectedEntity", + "DetectGuardrailsResponse", + "DetectGuardrailsResponseValidation", + "DetectRunsResponse", + "DetectRunsResponseOutputType", + "DetectRunsResponseStatus", "DetokenizeRecordResponseValueType", - "EntityLocation", - "EntityType", - "EntityTypes", "ErrorResponse", "ErrorResponseError", - "ErrorString", + "FileData", + "FileDataDataFormat", + "FileDataDeidentifyAudio", + "FileDataDeidentifyAudioDataFormat", + "FileDataDeidentifyDocument", + "FileDataDeidentifyDocumentDataFormat", + "FileDataDeidentifyImage", + "FileDataDeidentifyImageDataFormat", + "FileDataDeidentifyPdf", + "FileDataDeidentifyPresentation", + "FileDataDeidentifyPresentationDataFormat", + "FileDataDeidentifySpreadsheet", + "FileDataDeidentifySpreadsheetDataFormat", + "FileDataDeidentifyStructuredText", + "FileDataDeidentifyStructuredTextDataFormat", + "FileDataDeidentifyText", + "FileDataReidentifyFile", + "FileDataReidentifyFileDataFormat", + "Format", + "FormatMaskedItem", + "FormatPlaintextItem", + "FormatRedactedItem", "GooglerpcStatus", + "HttpCode", + "IdentifyResponse", "ProtobufAny", "RedactionEnumRedaction", + "ReidentifiedFileOutput", + "ReidentifiedFileOutputProcessedFileExtension", "ReidentifyFileResponse", - "ReidentifyFileResponseOutput", + "ReidentifyFileResponseOutputType", "ReidentifyFileResponseStatus", - "ReidentifyStringResponse", "RequestActionType", "ResourceId", - "RestrictRegex", - "TokenType", - "TokenTypeDefault", - "TokenTypeWithoutVault", - "TokenTypeWithoutVaultDefault", + "ShiftDates", + "ShiftDatesEntityTypesItem", + "StringResponseEntities", + "TokenTypeMapping", + "TokenTypeMappingDefault", + "TokenTypeMappingEntityOnlyItem", + "TokenTypeMappingEntityUnqCounterItem", + "TokenTypeMappingVaultTokenItem", "Transformations", - "TransformationsShiftDates", - "TransformationsShiftDatesEntityTypesItem", "UploadFileV2Response", "Uuid", "V1AuditAfterOptions", @@ -157,5 +197,5 @@ "V1UpdateRecordResponse", "V1VaultFieldMapping", "V1VaultSchemaConfig", - "VaultId", + "WordCharacterCount", ] diff --git a/skyflow/generated/rest/types/allow_regex.py b/skyflow/generated/rest/types/allow_regex.py deleted file mode 100644 index f4164375..00000000 --- a/skyflow/generated/rest/types/allow_regex.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -AllowRegex = typing.List[str] diff --git a/skyflow/generated/rest/types/check_guardrails_response_validation.py b/skyflow/generated/rest/types/check_guardrails_response_validation.py deleted file mode 100644 index dcb0b789..00000000 --- a/skyflow/generated/rest/types/check_guardrails_response_validation.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -CheckGuardrailsResponseValidation = typing.Union[typing.Literal["failed", "passed"], typing.Any] diff --git a/skyflow/generated/rest/types/configuration_id.py b/skyflow/generated/rest/types/configuration_id.py deleted file mode 100644 index 763ae161..00000000 --- a/skyflow/generated/rest/types/configuration_id.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -ConfigurationId = str diff --git a/skyflow/generated/rest/types/deidentified_file_output.py b/skyflow/generated/rest/types/deidentified_file_output.py new file mode 100644 index 00000000..387f57f2 --- /dev/null +++ b/skyflow/generated/rest/types/deidentified_file_output.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .deidentified_file_output_processed_file_extension import DeidentifiedFileOutputProcessedFileExtension +from .deidentified_file_output_processed_file_type import DeidentifiedFileOutputProcessedFileType + + +class DeidentifiedFileOutput(UniversalBaseModel): + """ + Details of output files. Files are specified as Base64-encoded data. + """ + + processed_file: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="processedFile")] = ( + pydantic.Field(default=None) + ) + """ + File content in Base64 format. + """ + + processed_file_type: typing_extensions.Annotated[ + typing.Optional[DeidentifiedFileOutputProcessedFileType], FieldMetadata(alias="processedFileType") + ] = pydantic.Field(default=None) + """ + Type of the processed file. + """ + + processed_file_extension: typing_extensions.Annotated[ + typing.Optional[DeidentifiedFileOutputProcessedFileExtension], FieldMetadata(alias="processedFileExtension") + ] = pydantic.Field(default=None) + """ + Extension of the processed file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentified_file_output_processed_file_extension.py b/skyflow/generated/rest/types/deidentified_file_output_processed_file_extension.py new file mode 100644 index 00000000..dc3a8519 --- /dev/null +++ b/skyflow/generated/rest/types/deidentified_file_output_processed_file_extension.py @@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DeidentifiedFileOutputProcessedFileExtension = typing.Union[ + typing.Literal[ + "mp3", + "wav", + "pdf", + "txt", + "csv", + "json", + "jpg", + "jpeg", + "tif", + "tiff", + "png", + "bmp", + "xls", + "xlsx", + "doc", + "docx", + "ppt", + "pptx", + "xml", + "dcm", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py b/skyflow/generated/rest/types/deidentified_file_output_processed_file_type.py similarity index 55% rename from skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py rename to skyflow/generated/rest/types/deidentified_file_output_processed_file_type.py index 332ce445..ca5aadfc 100644 --- a/skyflow/generated/rest/types/deidentify_file_output_processed_file_type.py +++ b/skyflow/generated/rest/types/deidentified_file_output_processed_file_type.py @@ -2,18 +2,15 @@ import typing -DeidentifyFileOutputProcessedFileType = typing.Union[ +DeidentifiedFileOutputProcessedFileType = typing.Union[ typing.Literal[ - "entities", - "plaintext_transcription", "redacted_audio", - "redacted_diarized_transcription", - "redacted_file", "redacted_image", - "redacted_medical_diarized_transcription", - "redacted_medical_transcription", - "redacted_text", "redacted_transcription", + "redacted_file", + "redacted_text", + "entities", + "redacted_transcription_diarize_json", ], typing.Any, ] diff --git a/skyflow/generated/rest/types/deidentify_file_response.py b/skyflow/generated/rest/types/deidentify_file_response.py index e4e6bf35..4c4503f4 100644 --- a/skyflow/generated/rest/types/deidentify_file_response.py +++ b/skyflow/generated/rest/types/deidentify_file_response.py @@ -8,12 +8,12 @@ class DeidentifyFileResponse(UniversalBaseModel): """ - Response to de-identify a file. + Response to deidentify a file. """ - run_id: str = pydantic.Field() + run_id: typing.Optional[str] = pydantic.Field(default=None) """ - Status URL for the detect run. + Status URL for the Detect run. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/deidentify_status_response.py b/skyflow/generated/rest/types/deidentify_status_response.py deleted file mode 100644 index 68a6cd3f..00000000 --- a/skyflow/generated/rest/types/deidentify_status_response.py +++ /dev/null @@ -1,74 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .deidentify_file_output import DeidentifyFileOutput -from .deidentify_status_response_output_type import DeidentifyStatusResponseOutputType -from .deidentify_status_response_status import DeidentifyStatusResponseStatus - - -class DeidentifyStatusResponse(UniversalBaseModel): - """ - Response to get the status of a detect run. - """ - - status: DeidentifyStatusResponseStatus = pydantic.Field() - """ - Status of the detect run. - """ - - output: typing.List[DeidentifyFileOutput] = pydantic.Field() - """ - How the input file was specified. - """ - - output_type: typing.Optional[DeidentifyStatusResponseOutputType] = pydantic.Field(default=None) - """ - How the output file is specified. - """ - - message: str = pydantic.Field() - """ - Status details about the detect run. - """ - - word_count: typing.Optional[int] = pydantic.Field(default=None) - """ - Number of words in the processed text. - """ - - character_count: typing.Optional[int] = pydantic.Field(default=None) - """ - Number of characters in the processed text. 
- """ - - size: typing.Optional[float] = pydantic.Field(default=None) - """ - Size of the processed text in kilobytes (KB). - """ - - duration: typing.Optional[float] = pydantic.Field(default=None) - """ - Duration of the processed audio in seconds. - """ - - pages: typing.Optional[int] = pydantic.Field(default=None) - """ - Number of pages in the processed PDF. - """ - - slides: typing.Optional[int] = pydantic.Field(default=None) - """ - Number of slides in the processed presentation. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/deidentify_status_response_output_type.py b/skyflow/generated/rest/types/deidentify_status_response_output_type.py deleted file mode 100644 index 051cc31a..00000000 --- a/skyflow/generated/rest/types/deidentify_status_response_output_type.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyStatusResponseOutputType = typing.Union[typing.Literal["BASE64", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/deidentify_status_response_status.py b/skyflow/generated/rest/types/deidentify_status_response_status.py deleted file mode 100644 index 9ec2931b..00000000 --- a/skyflow/generated/rest/types/deidentify_status_response_status.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DeidentifyStatusResponseStatus = typing.Union[typing.Literal["FAILED", "IN_PROGRESS", "SUCCESS", "UNKNOWN"], typing.Any] diff --git a/skyflow/generated/rest/types/deidentify_string_response.py b/skyflow/generated/rest/types/deidentify_string_response.py index c141f841..4655f80b 100644 --- a/skyflow/generated/rest/types/deidentify_string_response.py +++ b/skyflow/generated/rest/types/deidentify_string_response.py @@ -4,7 +4,7 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .detected_entity import DetectedEntity +from .string_response_entities import StringResponseEntities class DeidentifyStringResponse(UniversalBaseModel): @@ -12,22 +12,22 @@ class DeidentifyStringResponse(UniversalBaseModel): Response to deidentify a string. """ - processed_text: str = pydantic.Field() + processed_text: typing.Optional[str] = pydantic.Field(default=None) """ De-identified text. """ - entities: typing.List[DetectedEntity] = pydantic.Field() + entities: typing.Optional[typing.List[StringResponseEntities]] = pydantic.Field(default=None) """ Detected entities. """ - word_count: int = pydantic.Field() + word_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of words from the input text. """ - character_count: int = pydantic.Field() + character_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of characters from the input text. 
""" diff --git a/skyflow/generated/rest/types/check_guardrails_response.py b/skyflow/generated/rest/types/detect_guardrails_response.py similarity index 52% rename from skyflow/generated/rest/types/check_guardrails_response.py rename to skyflow/generated/rest/types/detect_guardrails_response.py index ad8e2dbf..2290ac52 100644 --- a/skyflow/generated/rest/types/check_guardrails_response.py +++ b/skyflow/generated/rest/types/detect_guardrails_response.py @@ -4,32 +4,28 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .check_guardrails_response_validation import CheckGuardrailsResponseValidation +from .detect_guardrails_response_validation import DetectGuardrailsResponseValidation -class CheckGuardrailsResponse(UniversalBaseModel): - """ - Response to check guardrails. - """ - - text: typing.Optional[str] = pydantic.Field(default=None) +class DetectGuardrailsResponse(UniversalBaseModel): + text: str = pydantic.Field() """ Text that was checked against guardrails. """ - toxicity: typing.Optional[bool] = pydantic.Field(default=None) + toxic: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the text is toxic. """ - denied_topics: typing.Optional[bool] = pydantic.Field(default=None) + denied_topic: typing.Optional[bool] = pydantic.Field(default=None) """ - Whether any denied topics were found. + Whether the text included a denied topic. """ - validation: typing.Optional[CheckGuardrailsResponseValidation] = pydantic.Field(default=None) + validation: DetectGuardrailsResponseValidation = pydantic.Field() """ - Validation result. + Whether the text passed validation. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/detect_guardrails_response_validation.py b/skyflow/generated/rest/types/detect_guardrails_response_validation.py new file mode 100644 index 00000000..5a59ddb0 --- /dev/null +++ b/skyflow/generated/rest/types/detect_guardrails_response_validation.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectGuardrailsResponseValidation = typing.Union[typing.Literal["failed", "passed"], typing.Any] diff --git a/skyflow/generated/rest/types/detect_runs_response.py b/skyflow/generated/rest/types/detect_runs_response.py new file mode 100644 index 00000000..e49cca0a --- /dev/null +++ b/skyflow/generated/rest/types/detect_runs_response.py @@ -0,0 +1,72 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .deidentified_file_output import DeidentifiedFileOutput +from .detect_runs_response_output_type import DetectRunsResponseOutputType +from .detect_runs_response_status import DetectRunsResponseStatus +from .word_character_count import WordCharacterCount + + +class DetectRunsResponse(UniversalBaseModel): + """ + Response to get the status of a file deidentification request. + """ + + status: typing.Optional[DetectRunsResponseStatus] = pydantic.Field(default=None) + """ + Status of the operation. + """ + + output_type: typing_extensions.Annotated[ + typing.Optional[DetectRunsResponseOutputType], FieldMetadata(alias="outputType") + ] = pydantic.Field(default=None) + """ + Format of the output file. + """ + + output: typing.Optional[typing.List[DeidentifiedFileOutput]] = pydantic.Field(default=None) + """ + Details of output files. 
Files are specified as Base64-encoded data. + """ + + message: typing.Optional[str] = pydantic.Field(default=None) + """ + Status details about the Detect run. + """ + + size: typing.Optional[float] = pydantic.Field(default=None) + """ + Size of the processed file in kilobytes (KB). + """ + + word_character_count: typing_extensions.Annotated[ + typing.Optional[WordCharacterCount], FieldMetadata(alias="wordCharacterCount") + ] = None + duration: typing.Optional[float] = pydantic.Field(default=None) + """ + Duration of the processed audio in seconds. + """ + + pages: typing.Optional[int] = pydantic.Field(default=None) + """ + Number of pages in the processed PDF. + """ + + slides: typing.Optional[int] = pydantic.Field(default=None) + """ + Number of slides in the processed presentation. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/detect_runs_response_output_type.py b/skyflow/generated/rest/types/detect_runs_response_output_type.py new file mode 100644 index 00000000..a24870c1 --- /dev/null +++ b/skyflow/generated/rest/types/detect_runs_response_output_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectRunsResponseOutputType = typing.Union[typing.Literal["UNKNOWN", "BASE64"], typing.Any] diff --git a/skyflow/generated/rest/types/detect_runs_response_status.py b/skyflow/generated/rest/types/detect_runs_response_status.py new file mode 100644 index 00000000..979454c6 --- /dev/null +++ b/skyflow/generated/rest/types/detect_runs_response_status.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DetectRunsResponseStatus = typing.Union[typing.Literal["UNKNOWN", "FAILED", "SUCCESS", "IN_PROGRESS"], typing.Any] diff --git a/skyflow/generated/rest/types/entity_location.py b/skyflow/generated/rest/types/entity_location.py deleted file mode 100644 index 487f9c72..00000000 --- a/skyflow/generated/rest/types/entity_location.py +++ /dev/null @@ -1,41 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel - - -class EntityLocation(UniversalBaseModel): - """ - Locations of an entity in the text. - """ - - start_index: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the first character of the string in the original text. - """ - - end_index: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the last character of the string in the original text. - """ - - start_index_processed: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the first character of the string in the processed text. - """ - - end_index_processed: typing.Optional[int] = pydantic.Field(default=None) - """ - Index of the last character of the string in the processed text. 
- """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/entity_types.py b/skyflow/generated/rest/types/entity_types.py deleted file mode 100644 index 3adb0438..00000000 --- a/skyflow/generated/rest/types/entity_types.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .entity_type import EntityType - -EntityTypes = typing.List[EntityType] diff --git a/skyflow/generated/rest/types/error_response_error.py b/skyflow/generated/rest/types/error_response_error.py index 722b69cc..efe080d3 100644 --- a/skyflow/generated/rest/types/error_response_error.py +++ b/skyflow/generated/rest/types/error_response_error.py @@ -4,6 +4,7 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .http_code import HttpCode class ErrorResponseError(UniversalBaseModel): @@ -12,16 +13,8 @@ class ErrorResponseError(UniversalBaseModel): gRPC status codes. See https://grpc.io/docs/guides/status-codes. """ - http_code: int = pydantic.Field() - """ - HTTP status codes. See https://developer.mozilla.org/en-US/docs/Web/HTTP/Status. - """ - - http_status: str = pydantic.Field() - """ - HTTP status message. - """ - + http_code: HttpCode + http_status: str message: str details: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None diff --git a/skyflow/generated/rest/types/error_string.py b/skyflow/generated/rest/types/error_string.py deleted file mode 100644 index 4ebbdff4..00000000 --- a/skyflow/generated/rest/types/error_string.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -ErrorString = str \ No newline at end of file diff --git a/skyflow/generated/rest/types/reidentify_file_response_output.py b/skyflow/generated/rest/types/file_data.py similarity index 53% rename from skyflow/generated/rest/types/reidentify_file_response_output.py rename to skyflow/generated/rest/types/file_data.py index bda44777..846837e0 100644 --- a/skyflow/generated/rest/types/reidentify_file_response_output.py +++ b/skyflow/generated/rest/types/file_data.py @@ -3,23 +3,25 @@ import typing import pydantic +import typing_extensions from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_data_format import FileDataDataFormat -class ReidentifyFileResponseOutput(UniversalBaseModel): - processed_file: str = pydantic.Field() +class FileData(UniversalBaseModel): """ - Re-identified file content in base64 format. + File to process. Files are specified as Base64-encoded data. """ - processed_file_type: typing.Literal["reidentified_file"] = pydantic.Field(default="reidentified_file") + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Type of the processed file. + Base64-encoded data of the file. """ - processed_file_extension: str = pydantic.Field() + data_format: FileDataDataFormat = pydantic.Field() """ - Extension of the processed file. + Format of the file. 
""" if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py b/skyflow/generated/rest/types/file_data_data_format.py similarity index 89% rename from skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py rename to skyflow/generated/rest/types/file_data_data_format.py index f3294014..48205005 100644 --- a/skyflow/generated/rest/files/types/deidentify_file_request_file_data_format.py +++ b/skyflow/generated/rest/types/file_data_data_format.py @@ -2,28 +2,29 @@ import typing -DeidentifyFileRequestFileDataFormat = typing.Union[ +FileDataDataFormat = typing.Union[ typing.Literal[ - "bmp", - "csv", - "dcm", - "doc", - "docx", - "jpeg", - "jpg", - "json", "mp3", + "wav", "pdf", - "png", - "ppt", - "pptx", + "txt", + "csv", + "json", + "jpg", + "jpeg", "tif", "tiff", - "txt", - "wav", + "png", + "bmp", "xls", "xlsx", + "doc", + "docx", + "ppt", + "pptx", "xml", + "dcm", + "jsonl", ], typing.Any, ] diff --git a/skyflow/generated/rest/files/types/deidentify_file_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_audio.py similarity index 53% rename from skyflow/generated/rest/files/types/deidentify_file_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_audio.py index 3e062bb2..8973b22a 100644 --- a/skyflow/generated/rest/files/types/deidentify_file_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_audio.py @@ -4,24 +4,24 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_file_request_file_data_format import DeidentifyFileRequestFileDataFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_audio_data_format import FileDataDeidentifyAudioDataFormat -class DeidentifyFileRequestFile(UniversalBaseModel): +class FileDataDeidentifyAudio(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ - data_format: DeidentifyFileRequestFileDataFormat = pydantic.Field() + data_format: FileDataDeidentifyAudioDataFormat = pydantic.Field() """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_audio_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_audio_data_format.py new file mode 100644 index 00000000..85f163c2 --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_audio_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +FileDataDeidentifyAudioDataFormat = typing.Union[typing.Literal["mp3", "wav"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_audio_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_document.py similarity index 54% rename from skyflow/generated/rest/files/types/deidentify_audio_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_document.py index 3ea4c16f..ef9b61f2 100644 --- a/skyflow/generated/rest/files/types/deidentify_audio_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_document.py @@ -4,24 +4,24 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_audio_request_file_data_format import DeidentifyAudioRequestFileDataFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_document_data_format import FileDataDeidentifyDocumentDataFormat -class DeidentifyAudioRequestFile(UniversalBaseModel): +class FileDataDeidentifyDocument(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ - data_format: DeidentifyAudioRequestFileDataFormat = pydantic.Field() + data_format: FileDataDeidentifyDocumentDataFormat = pydantic.Field() """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_document_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_document_data_format.py new file mode 100644 index 00000000..beaf1ffc --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_document_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FileDataDeidentifyDocumentDataFormat = typing.Union[typing.Literal["pdf", "doc", "docx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_image_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_image.py similarity index 53% rename from skyflow/generated/rest/files/types/deidentify_image_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_image.py index 36677964..c1f76298 100644 --- a/skyflow/generated/rest/files/types/deidentify_image_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_image.py @@ -4,24 +4,24 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_image_request_file_data_format import DeidentifyImageRequestFileDataFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_image_data_format import FileDataDeidentifyImageDataFormat -class DeidentifyImageRequestFile(UniversalBaseModel): +class FileDataDeidentifyImage(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. 
""" base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ - data_format: DeidentifyImageRequestFileDataFormat = pydantic.Field() + data_format: FileDataDeidentifyImageDataFormat = pydantic.Field() """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_image_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_image_data_format.py new file mode 100644 index 00000000..97dab89e --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_image_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FileDataDeidentifyImageDataFormat = typing.Union[typing.Literal["jpg", "jpeg", "tif", "tiff", "png", "bmp"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_pdf_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_pdf.py similarity index 66% rename from skyflow/generated/rest/files/types/deidentify_pdf_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_pdf.py index da461fd1..0dc2c1a5 100644 --- a/skyflow/generated/rest/files/types/deidentify_pdf_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_pdf.py @@ -4,23 +4,23 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata -class DeidentifyPdfRequestFile(UniversalBaseModel): +class FileDataDeidentifyPdf(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ data_format: typing.Literal["pdf"] = pydantic.Field(default="pdf") """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_presentation.py b/skyflow/generated/rest/types/file_data_deidentify_presentation.py new file mode 100644 index 00000000..17fa004e --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_presentation.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_presentation_data_format import FileDataDeidentifyPresentationDataFormat + + +class FileDataDeidentifyPresentation(UniversalBaseModel): + """ + File to process. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file. + """ + + data_format: FileDataDeidentifyPresentationDataFormat = pydantic.Field() + """ + Format of the file. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/file_data_deidentify_presentation_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_presentation_data_format.py new file mode 100644 index 00000000..d811746d --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_presentation_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FileDataDeidentifyPresentationDataFormat = typing.Union[typing.Literal["ppt", "pptx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_document_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_spreadsheet.py similarity index 53% rename from skyflow/generated/rest/files/types/deidentify_document_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_spreadsheet.py index cbf36c59..17ead2f1 100644 --- a/skyflow/generated/rest/files/types/deidentify_document_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_spreadsheet.py @@ -4,24 +4,24 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_document_request_file_data_format import DeidentifyDocumentRequestFileDataFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_spreadsheet_data_format import FileDataDeidentifySpreadsheetDataFormat -class DeidentifyDocumentRequestFile(UniversalBaseModel): +class FileDataDeidentifySpreadsheet(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ - data_format: DeidentifyDocumentRequestFileDataFormat = pydantic.Field() + data_format: FileDataDeidentifySpreadsheetDataFormat = pydantic.Field() """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_spreadsheet_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_spreadsheet_data_format.py new file mode 100644 index 00000000..f48a11aa --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_spreadsheet_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +FileDataDeidentifySpreadsheetDataFormat = typing.Union[typing.Literal["csv", "xls", "xlsx"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_structured_text.py similarity index 53% rename from skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_structured_text.py index f97e1c03..a78a11ab 100644 --- a/skyflow/generated/rest/files/types/deidentify_spreadsheet_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_structured_text.py @@ -4,24 +4,24 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata -from .deidentify_spreadsheet_request_file_data_format import DeidentifySpreadsheetRequestFileDataFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_deidentify_structured_text_data_format import FileDataDeidentifyStructuredTextDataFormat -class DeidentifySpreadsheetRequestFile(UniversalBaseModel): +class FileDataDeidentifyStructuredText(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. + Base64-encoded data of the file. """ - data_format: DeidentifySpreadsheetRequestFileDataFormat = pydantic.Field() + data_format: FileDataDeidentifyStructuredTextDataFormat = pydantic.Field() """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_deidentify_structured_text_data_format.py b/skyflow/generated/rest/types/file_data_deidentify_structured_text_data_format.py new file mode 100644 index 00000000..267b17a6 --- /dev/null +++ b/skyflow/generated/rest/types/file_data_deidentify_structured_text_data_format.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FileDataDeidentifyStructuredTextDataFormat = typing.Union[typing.Literal["json", "xml"], typing.Any] diff --git a/skyflow/generated/rest/files/types/deidentify_text_request_file.py b/skyflow/generated/rest/types/file_data_deidentify_text.py similarity index 66% rename from skyflow/generated/rest/files/types/deidentify_text_request_file.py rename to skyflow/generated/rest/types/file_data_deidentify_text.py index 193aa7bd..c8637361 100644 --- a/skyflow/generated/rest/files/types/deidentify_text_request_file.py +++ b/skyflow/generated/rest/types/file_data_deidentify_text.py @@ -4,23 +4,23 @@ import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata -class DeidentifyTextRequestFile(UniversalBaseModel): +class FileDataDeidentifyText(UniversalBaseModel): """ - File to de-identify. Files are specified as Base64-encoded data. + File to process. Files are specified as Base64-encoded data. """ base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() """ - Base64-encoded data of the file to de-identify. 
+ Base64-encoded data of the file. """ data_format: typing.Literal["txt"] = pydantic.Field(default="txt") """ - Data format of the file. + Format of the file. """ if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/file_data_reidentify_file.py b/skyflow/generated/rest/types/file_data_reidentify_file.py new file mode 100644 index 00000000..d691bcc0 --- /dev/null +++ b/skyflow/generated/rest/types/file_data_reidentify_file.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata +from .file_data_reidentify_file_data_format import FileDataReidentifyFileDataFormat + + +class FileDataReidentifyFile(UniversalBaseModel): + """ + File to process. Files are specified as Base64-encoded data. + """ + + base_64: typing_extensions.Annotated[str, FieldMetadata(alias="base64")] = pydantic.Field() + """ + Base64-encoded data of the file. + """ + + data_format: FileDataReidentifyFileDataFormat = pydantic.Field() + """ + Format of the file. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/file_data_reidentify_file_data_format.py b/skyflow/generated/rest/types/file_data_reidentify_file_data_format.py new file mode 100644 index 00000000..d0cc44ba --- /dev/null +++ b/skyflow/generated/rest/types/file_data_reidentify_file_data_format.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FileDataReidentifyFileDataFormat = typing.Union[ + typing.Literal["txt", "csv", "json", "xls", "xlsx", "doc", "docx", "xml"], typing.Any +] diff --git a/skyflow/generated/rest/files/types/reidentify_file_request_format.py b/skyflow/generated/rest/types/format.py similarity index 58% rename from skyflow/generated/rest/files/types/reidentify_file_request_format.py rename to skyflow/generated/rest/types/format.py index ec7ca5f1..a12992cd 100644 --- a/skyflow/generated/rest/files/types/reidentify_file_request_format.py +++ b/skyflow/generated/rest/types/format.py @@ -3,26 +3,28 @@ import typing import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from ...types.entity_type import EntityType +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .format_masked_item import FormatMaskedItem +from .format_plaintext_item import FormatPlaintextItem +from .format_redacted_item import FormatRedactedItem -class ReidentifyFileRequestFormat(UniversalBaseModel): +class Format(UniversalBaseModel): """ Mapping of preferred data formatting options to entity types. Returned values are dependent on the configuration of the vault storing the data and the permissions of the user or account making the request. """ - redacted: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + redacted: typing.Optional[typing.List[FormatRedactedItem]] = pydantic.Field(default=None) """ Entity types to fully redact. """ - masked: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + masked: typing.Optional[typing.List[FormatMaskedItem]] = pydantic.Field(default=None) """ Entity types to mask. 
""" - plaintext: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) + plaintext: typing.Optional[typing.List[FormatPlaintextItem]] = pydantic.Field(default=None) """ Entity types to return in plaintext. """ diff --git a/skyflow/generated/rest/types/entity_type.py b/skyflow/generated/rest/types/format_masked_item.py similarity index 98% rename from skyflow/generated/rest/types/entity_type.py rename to skyflow/generated/rest/types/format_masked_item.py index 1a343410..eeae9770 100644 --- a/skyflow/generated/rest/types/entity_type.py +++ b/skyflow/generated/rest/types/format_masked_item.py @@ -2,78 +2,78 @@ import typing -EntityType = typing.Union[ +FormatMaskedItem = typing.Union[ typing.Literal[ - "account_number", "age", - "all", "bank_account", - "blood_type", - "condition", - "corporate_action", "credit_card", "credit_card_expiration", "cvv", "date", "date_interval", - "day", "dob", - "dose", "driver_license", - "drug", - "duration", - "effect", "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", "event", "filename", - "financial_metric", "gender", - "healthcare_number", - "injury", - "ip_address", "language", - "location", "location_address", - "location_address_street", "location_city", "location_coordinate", "location_country", "location_state", "location_zip", "marital_status", - "medical_code", - "medical_process", "money", - "month", - "name", - "name_family", - "name_given", "name_medical_professional", - "numerical_pii", "occupation", "organization", - "organization_id", "organization_medical_facility", "origin", "passport_number", "password", - "phone_number", "physical_attribute", "political_affiliation", - "product", - "project", "religion", - "routing_number", - "sexuality", - "ssn", - "statistics", "time", - "trend", - "url", "username", - "vehicle_id", - "year", "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", ], typing.Any, ] diff --git a/skyflow/generated/rest/types/format_plaintext_item.py b/skyflow/generated/rest/types/format_plaintext_item.py new file mode 100644 index 00000000..62dfc4bc --- /dev/null +++ b/skyflow/generated/rest/types/format_plaintext_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +FormatPlaintextItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/format_redacted_item.py b/skyflow/generated/rest/types/format_redacted_item.py new file mode 100644 index 00000000..d2aee020 --- /dev/null +++ b/skyflow/generated/rest/types/format_redacted_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +FormatRedactedItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/vault_id.py b/skyflow/generated/rest/types/http_code.py similarity index 81% rename from skyflow/generated/rest/types/vault_id.py rename to skyflow/generated/rest/types/http_code.py index 02ae7d21..5fc9a3fb 100644 --- a/skyflow/generated/rest/types/vault_id.py +++ b/skyflow/generated/rest/types/http_code.py @@ -1,3 +1,3 @@ # This file was auto-generated by Fern from our API Definition. 
-VaultId = str +HttpCode = int diff --git a/skyflow/generated/rest/types/reidentify_string_response.py b/skyflow/generated/rest/types/identify_response.py similarity index 78% rename from skyflow/generated/rest/types/reidentify_string_response.py rename to skyflow/generated/rest/types/identify_response.py index cbb1b836..67786621 100644 --- a/skyflow/generated/rest/types/reidentify_string_response.py +++ b/skyflow/generated/rest/types/identify_response.py @@ -6,12 +6,12 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ReidentifyStringResponse(UniversalBaseModel): +class IdentifyResponse(UniversalBaseModel): """ - Re-identify string response. + Response after identifying text. """ - text: typing.Optional[str] = pydantic.Field(default=None) + text: str = pydantic.Field() """ Re-identified text. """ diff --git a/skyflow/generated/rest/types/deidentify_file_output.py b/skyflow/generated/rest/types/reidentified_file_output.py similarity index 56% rename from skyflow/generated/rest/types/deidentify_file_output.py rename to skyflow/generated/rest/types/reidentified_file_output.py index 7e17e168..c7f8544e 100644 --- a/skyflow/generated/rest/types/deidentify_file_output.py +++ b/skyflow/generated/rest/types/reidentified_file_output.py @@ -4,25 +4,27 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .deidentify_file_output_processed_file_type import DeidentifyFileOutputProcessedFileType +from .reidentified_file_output_processed_file_extension import ReidentifiedFileOutputProcessedFileExtension -class DeidentifyFileOutput(UniversalBaseModel): +class ReidentifiedFileOutput(UniversalBaseModel): """ - Details and contents of the processed file. + Details of output files. Files are specified as Base64-encoded data. """ processed_file: typing.Optional[str] = pydantic.Field(default=None) """ - URL or base64-encoded data of the output. + File content in Base64 format. """ - processed_file_type: typing.Optional[DeidentifyFileOutputProcessedFileType] = pydantic.Field(default=None) + processed_file_type: typing.Optional[typing.Literal["reidentified_file"]] = pydantic.Field(default=None) """ Type of the processed file. """ - processed_file_extension: typing.Optional[str] = pydantic.Field(default=None) + processed_file_extension: typing.Optional[ReidentifiedFileOutputProcessedFileExtension] = pydantic.Field( + default=None + ) """ Extension of the processed file. """ diff --git a/skyflow/generated/rest/types/reidentified_file_output_processed_file_extension.py b/skyflow/generated/rest/types/reidentified_file_output_processed_file_extension.py new file mode 100644 index 00000000..c6cbfd02 --- /dev/null +++ b/skyflow/generated/rest/types/reidentified_file_output_processed_file_extension.py @@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +ReidentifiedFileOutputProcessedFileExtension = typing.Union[ + typing.Literal[ + "mp3", + "wav", + "pdf", + "txt", + "csv", + "json", + "jpg", + "jpeg", + "tif", + "tiff", + "png", + "bmp", + "xls", + "xlsx", + "doc", + "docx", + "ppt", + "pptx", + "xml", + "dcm", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/reidentify_file_response.py b/skyflow/generated/rest/types/reidentify_file_response.py index c67b41ac..ef076c72 100644 --- a/skyflow/generated/rest/types/reidentify_file_response.py +++ b/skyflow/generated/rest/types/reidentify_file_response.py @@ -4,26 +4,27 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .reidentify_file_response_output import ReidentifyFileResponseOutput +from .reidentified_file_output import ReidentifiedFileOutput +from .reidentify_file_response_output_type import ReidentifyFileResponseOutputType from .reidentify_file_response_status import ReidentifyFileResponseStatus class ReidentifyFileResponse(UniversalBaseModel): """ - Response to re-identify a file. + Response to get the status & response of a file re-identification request. """ - status: ReidentifyFileResponseStatus = pydantic.Field() + status: typing.Optional[ReidentifyFileResponseStatus] = pydantic.Field(default=None) """ - Status of the re-identify operation. + Status of the operation. """ - output_type: typing.Literal["BASE64"] = pydantic.Field(default="BASE64") + output_type: typing.Optional[ReidentifyFileResponseOutputType] = pydantic.Field(default=None) """ Format of the output file. """ - output: ReidentifyFileResponseOutput + output: typing.Optional[ReidentifiedFileOutput] = None if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/skyflow/generated/rest/types/reidentify_file_response_output_type.py b/skyflow/generated/rest/types/reidentify_file_response_output_type.py index 03048c85..b7b9e8c4 100644 --- a/skyflow/generated/rest/types/reidentify_file_response_output_type.py +++ b/skyflow/generated/rest/types/reidentify_file_response_output_type.py @@ -2,4 +2,4 @@ import typing -ReidentifyFileResponseOutputType = typing.Union[typing.Literal["BASE64", "UNKNOWN"], typing.Any] +ReidentifyFileResponseOutputType = typing.Union[typing.Literal["UNKNOWN", "BASE64"], typing.Any] diff --git a/skyflow/generated/rest/types/reidentify_file_response_status.py b/skyflow/generated/rest/types/reidentify_file_response_status.py index 8bdfa1e0..ffa5dfc3 100644 --- a/skyflow/generated/rest/types/reidentify_file_response_status.py +++ b/skyflow/generated/rest/types/reidentify_file_response_status.py @@ -2,4 +2,4 @@ import typing -ReidentifyFileResponseStatus = typing.Union[typing.Literal["FAILED", "IN_PROGRESS", "SUCCESS", "UNKNOWN"], typing.Any] +ReidentifyFileResponseStatus = typing.Union[typing.Literal["UNKNOWN", "FAILED", "SUCCESS", "IN_PROGRESS"], typing.Any] diff --git a/skyflow/generated/rest/types/restrict_regex.py b/skyflow/generated/rest/types/restrict_regex.py deleted file mode 100644 index 06dd46b7..00000000 --- a/skyflow/generated/rest/types/restrict_regex.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -RestrictRegex = typing.List[str] diff --git a/skyflow/generated/rest/types/transformations_shift_dates.py b/skyflow/generated/rest/types/shift_dates.py similarity index 75% rename from skyflow/generated/rest/types/transformations_shift_dates.py rename to skyflow/generated/rest/types/shift_dates.py index 21b21af8..7a1af257 100644 --- a/skyflow/generated/rest/types/transformations_shift_dates.py +++ b/skyflow/generated/rest/types/shift_dates.py @@ -4,25 +4,25 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .transformations_shift_dates_entity_types_item import TransformationsShiftDatesEntityTypesItem +from .shift_dates_entity_types_item import ShiftDatesEntityTypesItem -class TransformationsShiftDates(UniversalBaseModel): +class ShiftDates(UniversalBaseModel): """ Shift dates by a specified number of days. """ - max_days: typing.Optional[int] = pydantic.Field(default=None) + min_days: typing.Optional[int] = pydantic.Field(default=None) """ - Maximum number of days to shift the date by. + Minimum number of days to shift the date by. """ - min_days: typing.Optional[int] = pydantic.Field(default=None) + max_days: typing.Optional[int] = pydantic.Field(default=None) """ - Minimum number of days to shift the date by. + Maximum number of days to shift the date by. """ - entity_types: typing.Optional[typing.List[TransformationsShiftDatesEntityTypesItem]] = pydantic.Field(default=None) + entity_types: typing.Optional[typing.List[ShiftDatesEntityTypesItem]] = pydantic.Field(default=None) """ Entity types to shift dates for. """ diff --git a/skyflow/generated/rest/types/shift_dates_entity_types_item.py b/skyflow/generated/rest/types/shift_dates_entity_types_item.py new file mode 100644 index 00000000..b0f10d19 --- /dev/null +++ b/skyflow/generated/rest/types/shift_dates_entity_types_item.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ShiftDatesEntityTypesItem = typing.Union[typing.Literal["date", "date_interval", "dob"], typing.Any] diff --git a/skyflow/generated/rest/types/detected_entity.py b/skyflow/generated/rest/types/string_response_entities.py similarity index 82% rename from skyflow/generated/rest/types/detected_entity.py rename to skyflow/generated/rest/types/string_response_entities.py index c34ba2ca..0d72524a 100644 --- a/skyflow/generated/rest/types/detected_entity.py +++ b/skyflow/generated/rest/types/string_response_entities.py @@ -4,12 +4,11 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .entity_location import EntityLocation -class DetectedEntity(UniversalBaseModel): +class StringResponseEntities(UniversalBaseModel): """ - Detected entities. + Detected entities for String """ token: typing.Optional[str] = pydantic.Field(default=None) @@ -22,7 +21,6 @@ class DetectedEntity(UniversalBaseModel): Original text of the entity. """ - location: typing.Optional[EntityLocation] = None entity_type: typing.Optional[str] = pydantic.Field(default=None) """ Highest-rated label. @@ -30,7 +28,7 @@ class DetectedEntity(UniversalBaseModel): entity_scores: typing.Optional[typing.Dict[str, float]] = pydantic.Field(default=None) """ - entity_scores and their scores. + Labels and their scores. 
""" if IS_PYDANTIC_V2: diff --git a/skyflow/generated/rest/types/token_type.py b/skyflow/generated/rest/types/token_type.py deleted file mode 100644 index 200b9630..00000000 --- a/skyflow/generated/rest/types/token_type.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .entity_type import EntityType -from .token_type_default import TokenTypeDefault - - -class TokenType(UniversalBaseModel): - """ - Mapping of tokens to generation for detected entities. Can't be specified together with `token_type`. - """ - - default: typing.Optional[TokenTypeDefault] = None - vault_token: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to replace with vault tokens. - """ - - entity_unq_counter: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to replace with entity tokens with unique counters. - """ - - entity_only: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to replace with entity tokens. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/token_type_default.py b/skyflow/generated/rest/types/token_type_default.py deleted file mode 100644 index cfda9f4b..00000000 --- a/skyflow/generated/rest/types/token_type_default.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -TokenTypeDefault = typing.Union[typing.Literal["entity_only", "entity_unq_counter", "vault_token"], typing.Any] diff --git a/skyflow/generated/rest/types/token_type_mapping.py b/skyflow/generated/rest/types/token_type_mapping.py new file mode 100644 index 00000000..5644fd01 --- /dev/null +++ b/skyflow/generated/rest/types/token_type_mapping.py @@ -0,0 +1,47 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .token_type_mapping_default import TokenTypeMappingDefault +from .token_type_mapping_entity_only_item import TokenTypeMappingEntityOnlyItem +from .token_type_mapping_entity_unq_counter_item import TokenTypeMappingEntityUnqCounterItem +from .token_type_mapping_vault_token_item import TokenTypeMappingVaultTokenItem + + +class TokenTypeMapping(UniversalBaseModel): + """ + Mapping of token types to detected entities. + """ + + vault_token: typing.Optional[typing.List[TokenTypeMappingVaultTokenItem]] = pydantic.Field(default=None) + """ + Entity types to replace with vault tokens. + """ + + entity_only: typing.Optional[typing.List[TokenTypeMappingEntityOnlyItem]] = pydantic.Field(default=None) + """ + Entity types to replace with entity tokens. + """ + + entity_unq_counter: typing.Optional[typing.List[TokenTypeMappingEntityUnqCounterItem]] = pydantic.Field( + default=None + ) + """ + Entity types to replace with entity tokens with unique counters. + """ + + default: typing.Optional[TokenTypeMappingDefault] = pydantic.Field(default=None) + """ + Default token type to generate for detected entities. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/token_type_mapping_default.py b/skyflow/generated/rest/types/token_type_mapping_default.py new file mode 100644 index 00000000..36a9e4ba --- /dev/null +++ b/skyflow/generated/rest/types/token_type_mapping_default.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +TokenTypeMappingDefault = typing.Union[typing.Literal["entity_unq_counter", "entity_only", "vault_token"], typing.Any] diff --git a/skyflow/generated/rest/types/token_type_mapping_entity_only_item.py b/skyflow/generated/rest/types/token_type_mapping_entity_only_item.py new file mode 100644 index 00000000..d3309ab9 --- /dev/null +++ b/skyflow/generated/rest/types/token_type_mapping_entity_only_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +TokenTypeMappingEntityOnlyItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/token_type_mapping_entity_unq_counter_item.py b/skyflow/generated/rest/types/token_type_mapping_entity_unq_counter_item.py new file mode 100644 index 00000000..219f79ac --- /dev/null +++ b/skyflow/generated/rest/types/token_type_mapping_entity_unq_counter_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +TokenTypeMappingEntityUnqCounterItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/token_type_mapping_vault_token_item.py b/skyflow/generated/rest/types/token_type_mapping_vault_token_item.py new file mode 100644 index 00000000..17178ea6 --- /dev/null +++ b/skyflow/generated/rest/types/token_type_mapping_vault_token_item.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +TokenTypeMappingVaultTokenItem = typing.Union[ + typing.Literal[ + "age", + "bank_account", + "credit_card", + "credit_card_expiration", + "cvv", + "date", + "date_interval", + "dob", + "driver_license", + "email_address", + "healthcare_number", + "ip_address", + "location", + "name", + "numerical_pii", + "phone_number", + "ssn", + "url", + "vehicle_id", + "medical_code", + "name_family", + "name_given", + "account_number", + "event", + "filename", + "gender", + "language", + "location_address", + "location_city", + "location_coordinate", + "location_country", + "location_state", + "location_zip", + "marital_status", + "money", + "name_medical_professional", + "occupation", + "organization", + "organization_medical_facility", + "origin", + "passport_number", + "password", + "physical_attribute", + "political_affiliation", + "religion", + "time", + "username", + "zodiac_sign", + "blood_type", + "condition", + "dose", + "drug", + "injury", + "medical_process", + "statistics", + "routing_number", + "corporate_action", + "financial_metric", + "product", + "trend", + "duration", + "location_address_street", + "all", + "sexuality", + "effect", + "project", + "organization_id", + "day", + "month", + "year", + ], + typing.Any, +] diff --git a/skyflow/generated/rest/types/token_type_without_vault.py b/skyflow/generated/rest/types/token_type_without_vault.py deleted file mode 100644 index d79a3477..00000000 --- a/skyflow/generated/rest/types/token_type_without_vault.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .entity_type import EntityType -from .token_type_without_vault_default import TokenTypeWithoutVaultDefault - - -class TokenTypeWithoutVault(UniversalBaseModel): - """ - Mapping of tokens to generation for detected entities. Can't be specified together with `token_type`. - """ - - default: typing.Optional[TokenTypeWithoutVaultDefault] = None - entity_unq_counter: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to replace with entity tokens with unique counters. - """ - - entity_only: typing.Optional[typing.List[EntityType]] = pydantic.Field(default=None) - """ - Entity types to replace with entity tokens. - """ - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/token_type_without_vault_default.py b/skyflow/generated/rest/types/token_type_without_vault_default.py deleted file mode 100644 index 53d71dc6..00000000 --- a/skyflow/generated/rest/types/token_type_without_vault_default.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -TokenTypeWithoutVaultDefault = typing.Union[typing.Literal["entity_only", "entity_unq_counter"], typing.Any] diff --git a/skyflow/generated/rest/types/transformations.py b/skyflow/generated/rest/types/transformations.py index 352df144..9895e2f6 100644 --- a/skyflow/generated/rest/types/transformations.py +++ b/skyflow/generated/rest/types/transformations.py @@ -4,18 +4,15 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .transformations_shift_dates import TransformationsShiftDates +from .shift_dates import ShiftDates class Transformations(UniversalBaseModel): """ - Transformations to apply to the detected entities. + Transformations to apply to detected entities. """ - shift_dates: typing.Optional[TransformationsShiftDates] = pydantic.Field(default=None) - """ - Shift dates by a specified number of days. - """ + shift_dates: typing.Optional[ShiftDates] = None if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py b/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py deleted file mode 100644 index f8d98df6..00000000 --- a/skyflow/generated/rest/types/transformations_shift_dates_entity_types_item.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -TransformationsShiftDatesEntityTypesItem = typing.Union[typing.Literal["date", "date_interval", "dob"], typing.Any] diff --git a/skyflow/generated/rest/types/word_character_count.py b/skyflow/generated/rest/types/word_character_count.py new file mode 100644 index 00000000..d2506866 --- /dev/null +++ b/skyflow/generated/rest/types/word_character_count.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +import pydantic +import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.serialization import FieldMetadata + + +class WordCharacterCount(UniversalBaseModel): + """ + Word and character count of the processed text. + """ + + word_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="wordCount")] = pydantic.Field( + default=None + ) + """ + Number of words in the processed text. + """ + + character_count: typing_extensions.Annotated[typing.Optional[int], FieldMetadata(alias="characterCount")] = ( + pydantic.Field(default=None) + ) + """ + Number of characters in the processed text. + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/version.py b/skyflow/generated/rest/version.py index e5d18b20..a93039be 100644 --- a/skyflow/generated/rest/version.py +++ b/skyflow/generated/rest/version.py @@ -1 +1,3 @@ -__version__ = "2.0.9" +from importlib import metadata + +__version__ = metadata.version("skyflow") diff --git a/skyflow/utils/__init__.py b/skyflow/utils/__init__.py index 67905512..f2788b11 100644 --- a/skyflow/utils/__init__.py +++ b/skyflow/utils/__init__.py @@ -1,4 +1,4 @@ -from ..utils.enums import LogLevel, Env +from ..utils.enums import LogLevel, Env, TokenType from ._skyflow_messages import SkyflowMessages from ._version import SDK_VERSION from ._helpers import get_base_url, format_scope diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 114079b5..899dd1b4 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -14,7 +14,7 @@ from skyflow.error import SkyflowError from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse, \ - DeidentifyStringResponse, ReidentifyStringResponse, ErrorResponse + DeidentifyStringResponse, ErrorResponse, IdentifyResponse from skyflow.generated.rest.core.http_response import HttpResponse from skyflow.utils.logger import log_error_log from skyflow.vault.detect import DeidentifyTextResponse, ReidentifyTextResponse @@ -90,12 +90,12 @@ def convert_detected_entity_to_entity_info(detected_entity): token=detected_entity.token, value=detected_entity.value, text_index=TextIndex( - start=detected_entity.location.start_index, - end=detected_entity.location.end_index + start=detected_entity.location['start_index'], + end=detected_entity.location['end_index'] ), processed_index=TextIndex( - start=detected_entity.location.start_index_processed, - end=detected_entity.location.end_index_processed + start=detected_entity.location['start_index_processed'], + end=detected_entity.location['end_index_processed'] ), entity=detected_entity.entity_type, scores=detected_entity.entity_scores @@ -388,7 +388,7 @@ def parse_deidentify_text_response(api_response: DeidentifyStringResponse): char_count=api_response.character_count ) -def parse_reidentify_text_response(api_response: ReidentifyStringResponse): +def parse_reidentify_text_response(api_response: IdentifyResponse): return ReidentifyTextResponse(api_response.text) def log_and_reject_error(description, status_code, request_id, http_status=None, grpc_code=None, details=None, logger = None): diff --git a/skyflow/utils/validations/_validations.py 
b/skyflow/utils/validations/_validations.py index 4428d11e..3da8b3a4 100644 --- a/skyflow/utils/validations/_validations.py +++ b/skyflow/utils/validations/_validations.py @@ -1,7 +1,6 @@ import base64 import json import os -from skyflow.generated.rest import TokenType from skyflow.service_account import is_expired from skyflow.utils.enums import LogLevel, Env, RedactionType, TokenMode, DetectEntities, DetectOutputTranscriptions, \ MaskingMethod diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py index 62d551c1..b22a76b3 100644 --- a/skyflow/vault/controller/_detect.py +++ b/skyflow/vault/controller/_detect.py @@ -3,10 +3,11 @@ import os import base64 import time -from skyflow.generated.rest import DeidentifyTextRequestFile, DeidentifyAudioRequestFile, DeidentifyPdfRequestFile, \ - DeidentifyImageRequestFile, DeidentifyPresentationRequestFile, DeidentifySpreadsheetRequestFile, \ - DeidentifyDocumentRequestFile, DeidentifyFileRequestFile -from skyflow.generated.rest.types.deidentify_status_response import DeidentifyStatusResponse + +from skyflow.generated.rest import FileDataDeidentifyText, FileDataDeidentifyPdf, FileDataDeidentifyPresentation, \ + FileDataDeidentifySpreadsheet, FileDataDeidentifyDocument, FileDataDeidentifyStructuredText, FileData, \ + FileDataDeidentifyImage, Format, FileDataDeidentifyAudio, WordCharacterCount, DetectRunsResponse + from skyflow.utils._skyflow_messages import SkyflowMessages from skyflow.utils._utils import get_attribute, get_metrics, handle_exception, parse_deidentify_text_response, parse_reidentify_text_response from skyflow.utils.constants import SKY_META_DATA_HEADER @@ -14,7 +15,6 @@ from skyflow.utils.validations import validate_deidentify_file_request, validate_get_detect_run_request from skyflow.utils.validations._validations import validate_deidentify_text_request, validate_reidentify_text_request from typing import Dict, Any -from skyflow.generated.rest.strings.types.reidentify_string_request_format import ReidentifyStringRequestFormat from skyflow.vault.detect import DeidentifyTextRequest, DeidentifyTextResponse, ReidentifyTextRequest, \ ReidentifyTextResponse, DeidentifyFileRequest, DeidentifyFileResponse, GetDetectRunRequest @@ -46,7 +46,7 @@ def ___build_deidentify_text_body(self, request: DeidentifyTextRequest) -> Dict[ return deidentify_text_body def ___build_reidentify_text_body(self, request: ReidentifyTextRequest) -> Dict[str, Any]: - parsed_format = ReidentifyStringRequestFormat( + parsed_format = Format( redacted=request.redacted_entities, masked=request.masked_entities, plaintext=request.plain_text_entities @@ -84,7 +84,7 @@ def __poll_for_processed_file(self, run_id, max_wait_time=64): except Exception as e: raise e - def __save_deidentify_file_response_output(self, response: DeidentifyStatusResponse, output_directory: str, original_file_name: str, name_without_ext: str): + def __save_deidentify_file_response_output(self, response: DetectRunsResponse, output_directory: str, original_file_name: str, name_without_ext: str): if not response or not hasattr(response, 'output') or not response.output or not output_directory: return @@ -129,10 +129,10 @@ def __parse_deidentify_file_response(self, data, run_id=None, status=None): word_count = None char_count = None - word_character_count = getattr(data, "wordCharacterCount", None) - if word_character_count and isinstance(word_character_count, dict): - word_count = word_character_count.get("wordCount") - char_count = word_character_count.get("characterCount") 
+ word_character_count = getattr(data, "word_character_count", None) + if word_character_count and isinstance(word_character_count, WordCharacterCount): + word_count = word_character_count.word_count + char_count = word_character_count.character_count size = getattr(data, "size", None) @@ -142,23 +142,20 @@ def __parse_deidentify_file_response(self, data, run_id=None, status=None): pages = getattr(data, "pages", None) slides = getattr(data, "slides", None) - # Convert output to list of dicts, prefer camelCase keys def output_to_dict_list(output): result = [] for o in output: if isinstance(o, dict): result.append({ - "file": o.get("processedFile") or o.get("processed_file"), - "type": o.get("processedFileType") or o.get("processed_file_type"), - "extension": o.get("processedFileExtension") or o.get("processed_file_extension") + "file": o.get("processed_file"), + "type": o.get("processed_file_type"), + "extension": o.get("processed_file_extension") }) else: result.append({ - "file": getattr(o, "processedFile", None) or getattr(o, "processed_file", None), - "type": getattr(o, "processedFileType", None) or getattr(o, "processed_file_type", None), - "extension": getattr(o, "processedFileExtension", None) or getattr(o, - "processed_file_extension", - None) + "file": getattr(o, "processed_file", None), + "type": getattr(o, "processed_file_type", None), + "extension": getattr(o, "processed_file_extension", None) }) return result @@ -200,7 +197,6 @@ def __get_token_format(self, request): 'default': getattr(request.token_format, "default", None), 'entity_unq_counter': getattr(request.token_format, "entity_unique_counter", None), 'entity_only': getattr(request.token_format, "entity_only", None), - 'vault_token': getattr(request.token_format, "vault_token", None) } def __get_transformations(self, request): @@ -293,7 +289,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): try: if file_extension == 'txt': - req_file = DeidentifyTextRequestFile(base_64=base64_string, data_format="txt") + req_file = FileDataDeidentifyText(base_64=base64_string, data_format="txt") api_call = files_api.deidentify_text api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -307,7 +303,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension in ['mp3', 'wav']: - req_file = DeidentifyAudioRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileDataDeidentifyAudio(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_audio api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -327,7 +323,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension == 'pdf': - req_file = DeidentifyPdfRequestFile(base_64=base64_string) + req_file = FileDataDeidentifyPdf(base_64=base64_string) api_call = files_api.deidentify_pdf api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -342,7 +338,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension in ['jpeg', 'jpg', 'png', 'bmp', 'tif', 'tiff']: - req_file = DeidentifyImageRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileDataDeidentifyImage(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_image api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -358,7 +354,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension in ['ppt', 'pptx']: - req_file = DeidentifyPresentationRequestFile(base_64=base64_string, 
data_format=file_extension) + req_file = FileDataDeidentifyPresentation(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_presentation api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -371,7 +367,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension in ['csv', 'xls', 'xlsx']: - req_file = DeidentifySpreadsheetRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileDataDeidentifySpreadsheet(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_spreadsheet api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -380,12 +376,11 @@ def deidentify_file(self, request: DeidentifyFileRequest): 'token_type': self.__get_token_format(request), 'allow_regex': request.allow_regex_list, 'restrict_regex': request.restrict_regex_list, - 'transformations': self.__get_transformations(request), 'request_options': self.__get_headers() } elif file_extension in ['doc', 'docx']: - req_file = DeidentifyDocumentRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileDataDeidentifyDocument(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_document api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -398,9 +393,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } elif file_extension in ['json', 'xml']: - from skyflow.generated.rest.files.types.deidentify_structured_text_request_file import \ - DeidentifyStructuredTextRequestFile - req_file = DeidentifyStructuredTextRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileDataDeidentifyStructuredText(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_structured_text api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), @@ -414,7 +407,7 @@ def deidentify_file(self, request: DeidentifyFileRequest): } else: - req_file = DeidentifyFileRequestFile(base_64=base64_string, data_format=file_extension) + req_file = FileData(base_64=base64_string, data_format=file_extension) api_call = files_api.deidentify_file api_kwargs = { 'vault_id': self.__vault_client.get_vault_id(), diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index 6eaacf47..6fc3ae68 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -494,12 +494,12 @@ def test_parse_deidentify_text_response(self): mock_entity.value = "sensitive_value" mock_entity.entity_type = "EMAIL" mock_entity.entity_scores = {"EMAIL": 0.95} - mock_entity.location = Mock( - start_index=10, - end_index=20, - start_index_processed=15, - end_index_processed=25 - ) + mock_entity.location = { + "start_index": 10, + "end_index": 20, + "start_index_processed": 15, + "end_index_processed":25 + } mock_api_response = Mock() mock_api_response.processed_text = "Sample processed text" @@ -555,12 +555,12 @@ def test__convert_detected_entity_to_entity_info(self): mock_detected_entity.value = "sensitive_value" mock_detected_entity.entity_type = "EMAIL" mock_detected_entity.entity_scores = {"EMAIL": 0.95} - mock_detected_entity.location = Mock( - start_index=10, - end_index=20, - start_index_processed=15, - end_index_processed=25 - ) + mock_detected_entity.location = { + "start_index": 10, + "end_index": 20, + "start_index_processed": 15, + "end_index_processed":25 + } result = convert_detected_entity_to_entity_info(mock_detected_entity) @@ -580,12 +580,12 @@ def 
test__convert_detected_entity_to_entity_info_with_minimal_data(self): mock_detected_entity.value = None mock_detected_entity.entity_type = "UNKNOWN" mock_detected_entity.entity_scores = {} - mock_detected_entity.location = Mock( - start_index=0, - end_index=0, - start_index_processed=0, - end_index_processed=0 - ) + mock_detected_entity.location = { + "start_index": 0, + "end_index": 0, + "start_index_processed":0, + "end_index_processed":0 + } result = convert_detected_entity_to_entity_info(mock_detected_entity) diff --git a/tests/vault/controller/test__detect.py b/tests/vault/controller/test__detect.py index dc3a753f..c2f9a861 100644 --- a/tests/vault/controller/test__detect.py +++ b/tests/vault/controller/test__detect.py @@ -3,6 +3,7 @@ import base64 import os from skyflow.error import SkyflowError +from skyflow.generated.rest import WordCharacterCount from skyflow.utils import SkyflowMessages from skyflow.vault.controller import Detect from skyflow.vault.detect import DeidentifyTextRequest, ReidentifyTextRequest, \ @@ -149,7 +150,7 @@ def test_deidentify_file_txt_success(self, mock_open, mock_basename, mock_base64 processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [] - processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) + processed_response.word_character_count = WordCharacterCount(word_count=1, character_count=1) with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", @@ -211,7 +212,7 @@ def test_deidentify_file_audio_success(self, mock_base64, mock_validate): processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [] - processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) + processed_response.word_character_count = Mock(word_count=1, character_count=1) with patch.object(self.detect, "_Detect__poll_for_processed_file", return_value=processed_response) as mock_poll, \ patch.object(self.detect, "_Detect__parse_deidentify_file_response", @@ -295,16 +296,15 @@ def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, processed_response = Mock() processed_response.status = "SUCCESS" processed_response.output = [ - {"processedFile": "dGVzdCBjb250ZW50", "processedFileType": "pdf", "processedFileExtension": "pdf"} + {"processed_file": "dGVzdCBjb250ZW50", "processed_file_type": "pdf", "processed_file_extension": "pdf"} ] - processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) processed_response.size = 1 processed_response.duration = 1 processed_response.pages = 1 processed_response.slides = 1 processed_response.message = "" processed_response.run_id = "runid123" - processed_response.wordCharacterCount = {"wordCount": 1, "characterCount": 1} + processed_response.word_character_count = WordCharacterCount(word_count=1, character_count=1) mock_poll.return_value = processed_response # Test configuration for different file types @@ -352,6 +352,7 @@ def test_deidentify_file_all_branches(self, mock_poll, mock_open, mock_basename, result = self.detect.deidentify_file(req) # Verify the result + print("Result : ", result) self.assertIsInstance(result, DeidentifyFileResponse) self.assertEqual(result.status, "SUCCESS") self.assertEqual(result.run_id, "runid123") @@ -661,7 +662,7 @@ def test_deidentify_file_using_file_path(self, mock_open, mock_basename, mock_ba processedFileType="txt", 
processedFileExtension="txt") ] - processed_response.wordCharacterCount = Mock(wordCount=1, characterCount=1) + processed_response.word_character_count = WordCharacterCount(word_count=1, character_count=1) # Test the method with patch.object(self.detect, "_Detect__poll_for_processed_file", From ac51edf830fe61cf2d06f4e4cdc62fc892a1b7cb Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 6 Nov 2025 14:27:42 +0000 Subject: [PATCH 47/60] [AUTOMATED] Private Release 1.15.8.dev0+08b535a --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index a95dc1dd..d8473340 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.5.dev0+dab7b66' +current_version = '1.15.8.dev0+08b535a' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index f15769f8..72bda35b 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.5.dev0+dab7b66' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+08b535a' \ No newline at end of file From 4c277755032784d2a203eddca83afb1ef9ed5f8e Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 6 Nov 2025 21:12:28 +0530 Subject: [PATCH 48/60] SK-2353: fix indentation --- skyflow/vault/controller/_detect.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/skyflow/vault/controller/_detect.py b/skyflow/vault/controller/_detect.py index b22a76b3..44ef2540 100644 --- a/skyflow/vault/controller/_detect.py +++ b/skyflow/vault/controller/_detect.py @@ -3,11 +3,9 @@ import os import base64 import time - from skyflow.generated.rest import FileDataDeidentifyText, FileDataDeidentifyPdf, FileDataDeidentifyPresentation, \ FileDataDeidentifySpreadsheet, FileDataDeidentifyDocument, FileDataDeidentifyStructuredText, FileData, \ FileDataDeidentifyImage, Format, FileDataDeidentifyAudio, WordCharacterCount, DetectRunsResponse - from skyflow.utils._skyflow_messages import SkyflowMessages from skyflow.utils._utils import get_attribute, get_metrics, handle_exception, parse_deidentify_text_response, parse_reidentify_text_response from skyflow.utils.constants import SKY_META_DATA_HEADER @@ -18,7 +16,6 @@ from skyflow.vault.detect import DeidentifyTextRequest, DeidentifyTextResponse, ReidentifyTextRequest, \ ReidentifyTextResponse, DeidentifyFileRequest, DeidentifyFileResponse, GetDetectRunRequest - class Detect: def __init__(self, vault_client): self.__vault_client = vault_client From 9e85bbab0e1dd0127f317490675a76564a132f25 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 6 Nov 2025 15:43:05 +0000 Subject: [PATCH 49/60] [AUTOMATED] Private Release 1.15.8.dev0+4c27775 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index d8473340..5b22b2ac 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+08b535a' +current_version = '1.15.8.dev0+4c27775' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 72bda35b..a1a651c8 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+08b535a' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+4c27775' \ No newline at end of file From ba735537d2780e3ff22893b45db9c25763f5930e Mon 
Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 6 Nov 2025 23:19:31 +0530 Subject: [PATCH 50/60] SK-2385: add locations to the generated code --- skyflow/generated/rest/__init__.py | 2 + skyflow/generated/rest/types/__init__.py | 2 + skyflow/generated/rest/types/locations.py | 41 +++++++++++++++++++ .../rest/types/string_response_entities.py | 2 + skyflow/utils/_utils.py | 8 ++-- tests/utils/test__utils.py | 36 ++++++++-------- 6 files changed, 69 insertions(+), 22 deletions(-) create mode 100644 skyflow/generated/rest/types/locations.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py index 8a59c25d..bdb94808 100644 --- a/skyflow/generated/rest/__init__.py +++ b/skyflow/generated/rest/__init__.py @@ -48,6 +48,7 @@ GooglerpcStatus, HttpCode, IdentifyResponse, + Locations, ProtobufAny, RedactionEnumRedaction, ReidentifiedFileOutput, @@ -201,6 +202,7 @@ "HttpCode", "IdentifyResponse", "InternalServerError", + "Locations", "NotFoundError", "ProtobufAny", "RecordServiceBulkGetRecordRequestOrderBy", diff --git a/skyflow/generated/rest/types/__init__.py b/skyflow/generated/rest/types/__init__.py index 75979081..11929765 100644 --- a/skyflow/generated/rest/types/__init__.py +++ b/skyflow/generated/rest/types/__init__.py @@ -47,6 +47,7 @@ from .googlerpc_status import GooglerpcStatus from .http_code import HttpCode from .identify_response import IdentifyResponse +from .locations import Locations from .protobuf_any import ProtobufAny from .redaction_enum_redaction import RedactionEnumRedaction from .reidentified_file_output import ReidentifiedFileOutput @@ -146,6 +147,7 @@ "GooglerpcStatus", "HttpCode", "IdentifyResponse", + "Locations", "ProtobufAny", "RedactionEnumRedaction", "ReidentifiedFileOutput", diff --git a/skyflow/generated/rest/types/locations.py b/skyflow/generated/rest/types/locations.py new file mode 100644 index 00000000..7b8ba88e --- /dev/null +++ b/skyflow/generated/rest/types/locations.py @@ -0,0 +1,41 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class Locations(UniversalBaseModel): + """ + Locations of an entity in the text. + """ + + start_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the original text. + """ + + end_index: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the original text. + """ + + start_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the first character of the string in the processed text. + """ + + end_index_processed: typing.Optional[int] = pydantic.Field(default=None) + """ + Index of the last character of the string in the processed text. 
+ """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/skyflow/generated/rest/types/string_response_entities.py b/skyflow/generated/rest/types/string_response_entities.py index 0d72524a..864d5a46 100644 --- a/skyflow/generated/rest/types/string_response_entities.py +++ b/skyflow/generated/rest/types/string_response_entities.py @@ -4,6 +4,7 @@ import pydantic from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .locations import Locations class StringResponseEntities(UniversalBaseModel): @@ -21,6 +22,7 @@ class StringResponseEntities(UniversalBaseModel): Original text of the entity. """ + location: typing.Optional[Locations] = None entity_type: typing.Optional[str] = pydantic.Field(default=None) """ Highest-rated label. diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py index 899dd1b4..4278357e 100644 --- a/skyflow/utils/_utils.py +++ b/skyflow/utils/_utils.py @@ -90,12 +90,12 @@ def convert_detected_entity_to_entity_info(detected_entity): token=detected_entity.token, value=detected_entity.value, text_index=TextIndex( - start=detected_entity.location['start_index'], - end=detected_entity.location['end_index'] + start=detected_entity.location.start_index, + end=detected_entity.location.end_index ), processed_index=TextIndex( - start=detected_entity.location['start_index_processed'], - end=detected_entity.location['end_index_processed'] + start=detected_entity.location.start_index_processed, + end=detected_entity.location.end_index_processed ), entity=detected_entity.entity_type, scores=detected_entity.entity_scores diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py index 6fc3ae68..6eaacf47 100644 --- a/tests/utils/test__utils.py +++ b/tests/utils/test__utils.py @@ -494,12 +494,12 @@ def test_parse_deidentify_text_response(self): mock_entity.value = "sensitive_value" mock_entity.entity_type = "EMAIL" mock_entity.entity_scores = {"EMAIL": 0.95} - mock_entity.location = { - "start_index": 10, - "end_index": 20, - "start_index_processed": 15, - "end_index_processed":25 - } + mock_entity.location = Mock( + start_index=10, + end_index=20, + start_index_processed=15, + end_index_processed=25 + ) mock_api_response = Mock() mock_api_response.processed_text = "Sample processed text" @@ -555,12 +555,12 @@ def test__convert_detected_entity_to_entity_info(self): mock_detected_entity.value = "sensitive_value" mock_detected_entity.entity_type = "EMAIL" mock_detected_entity.entity_scores = {"EMAIL": 0.95} - mock_detected_entity.location = { - "start_index": 10, - "end_index": 20, - "start_index_processed": 15, - "end_index_processed":25 - } + mock_detected_entity.location = Mock( + start_index=10, + end_index=20, + start_index_processed=15, + end_index_processed=25 + ) result = convert_detected_entity_to_entity_info(mock_detected_entity) @@ -580,12 +580,12 @@ def test__convert_detected_entity_to_entity_info_with_minimal_data(self): mock_detected_entity.value = None mock_detected_entity.entity_type = "UNKNOWN" mock_detected_entity.entity_scores = {} - mock_detected_entity.location = { - "start_index": 0, - "end_index": 0, - "start_index_processed":0, - "end_index_processed":0 - } + mock_detected_entity.location = Mock( + start_index=0, + end_index=0, + start_index_processed=0, + end_index_processed=0 + ) result = 
convert_detected_entity_to_entity_info(mock_detected_entity) From 155a6895cc9d73368494f2b6a6055a707852215b Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Thu, 6 Nov 2025 17:50:50 +0000 Subject: [PATCH 51/60] [AUTOMATED] Private Release 1.15.8.dev0+ba73553 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 5b22b2ac..903f488f 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+4c27775' +current_version = '1.15.8.dev0+ba73553' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index a1a651c8..661806df 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+4c27775' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+ba73553' \ No newline at end of file From 3e794e3979199f15638a3f3ced709b35e6bc4a69 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 7 Nov 2025 12:50:23 +0530 Subject: [PATCH 52/60] SK-2385: update output transcriptions enum --- skyflow/utils/enums/detect_output_transcriptions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/skyflow/utils/enums/detect_output_transcriptions.py b/skyflow/utils/enums/detect_output_transcriptions.py index 69f94d79..4e14f911 100644 --- a/skyflow/utils/enums/detect_output_transcriptions.py +++ b/skyflow/utils/enums/detect_output_transcriptions.py @@ -4,5 +4,4 @@ class DetectOutputTranscriptions(Enum): DIARIZED_TRANSCRIPTION = "diarized_transcription" MEDICAL_DIARIZED_TRANSCRIPTION = "medical_diarized_transcription" MEDICAL_TRANSCRIPTION = "medical_transcription" - PLAINTEXT_TRANSCRIPTION = "plaintext_transcription" TRANSCRIPTION = "transcription" \ No newline at end of file From 342df8ba3b6c27380e343dd969617ef77628b059 Mon Sep 17 00:00:00 2001 From: saileshwar-skyflow Date: Fri, 7 Nov 2025 07:21:02 +0000 Subject: [PATCH 53/60] [AUTOMATED] Private Release 1.15.8.dev0+3e794e3 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 903f488f..2a7056bc 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+ba73553' +current_version = '1.15.8.dev0+3e794e3' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 661806df..53455202 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+ba73553' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+3e794e3' \ No newline at end of file From a22b9c67b24d23c3b867c60d3b6547870a239b9c Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 17:00:39 +0530 Subject: [PATCH 54/60] SK-1773 Add str for file upload response --- skyflow/vault/data/_file_upload_response.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/skyflow/vault/data/_file_upload_response.py b/skyflow/vault/data/_file_upload_response.py index 18218f08..91a43c26 100644 --- a/skyflow/vault/data/_file_upload_response.py +++ b/skyflow/vault/data/_file_upload_response.py @@ -4,3 +4,9 @@ def __init__(self, errors): self.skyflow_id = skyflow_id self.errors = errors + + def __repr__(self): + return f"FileUploadResponse(skyflow_id={self.skyflow_id}, errors={self.errors})" + + def __str__(self): + return self.__repr__() \ No newline at 
end of file From 255c8eca5327cee6652b482bcbabd93696189699 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 11:31:11 +0000 Subject: [PATCH 55/60] [AUTOMATED] Private Release 1.15.8.dev0+a22b9c6 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 2a7056bc..2c8f3e19 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+3e794e3' +current_version = '1.15.8.dev0+a22b9c6' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 53455202..da343d6c 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+3e794e3' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+a22b9c6' \ No newline at end of file From 3fe71714ee8d464f5ec3ce68873b2af5c7cb9614 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 11:42:25 +0000 Subject: [PATCH 56/60] [AUTOMATED] Public Release - 2.0.0 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 2c8f3e19..9f06860f 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+a22b9c6' +current_version = '2.0.0' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index da343d6c..0d05fc30 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+a22b9c6' \ No newline at end of file +SDK_VERSION = '2.0.0' \ No newline at end of file From a3f1f0f488849428e080bb0f323bdc1094735ea5 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 19:12:26 +0530 Subject: [PATCH 57/60] SK-1773 Dummy change to trigger release --- skyflow/utils/constants.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/skyflow/utils/constants.py b/skyflow/utils/constants.py index d7e7a7f9..ef20faf8 100644 --- a/skyflow/utils/constants.py +++ b/skyflow/utils/constants.py @@ -1,3 +1,4 @@ OPTIONAL_TOKEN='token' PROTOCOL='https' -SKY_META_DATA_HEADER='sky-metadata' \ No newline at end of file +SKY_META_DATA_HEADER='sky-metadata' + From 080d5b51c76322f3b9d90a17b2034172f1e1e992 Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 19:43:18 +0530 Subject: [PATCH 58/60] SK-1773 Update release workflow --- .github/workflows/shared-build-and-deploy.yml | 10 +++++----- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml index e826c20b..135b87bc 100644 --- a/.github/workflows/shared-build-and-deploy.yml +++ b/.github/workflows/shared-build-and-deploy.yml @@ -27,11 +27,6 @@ jobs: python -m pip install --upgrade pip pip install setuptools wheel twine - - name: Build and install skyflow package - run: | - python setup.py sdist bdist_wheel - pip install dist/skyflow-*.whl - - name: Resolve Branch for the Tagged Commit id: resolve-branch if: ${{ inputs.tag == 'beta' || inputs.tag == 'public' }} @@ -87,6 +82,11 @@ jobs: git push origin ${{ env.branch_name }} fi + - name: Build and install skyflow package + run: | + python setup.py sdist bdist_wheel + pip install dist/skyflow-*.whl + - name: Build and Publish Package if: ${{ 
inputs.tag == 'beta' || inputs.tag == 'public' }} env: diff --git a/setup.py b/setup.py index 9f06860f..2c8f3e19 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '2.0.0' +current_version = '1.15.8.dev0+a22b9c6' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index 0d05fc30..da343d6c 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '2.0.0' \ No newline at end of file +SDK_VERSION = '1.15.8.dev0+a22b9c6' \ No newline at end of file From 5691f5e12e9e489ba654048b33111bb4020396dd Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Tue, 11 Nov 2025 14:16:27 +0000 Subject: [PATCH 59/60] [AUTOMATED] Public Release - 2.0.0 --- setup.py | 2 +- skyflow/utils/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 2c8f3e19..9f06860f 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if sys.version_info < (3, 8): raise RuntimeError("skyflow requires Python 3.8+") -current_version = '1.15.8.dev0+a22b9c6' +current_version = '2.0.0' setup( name='skyflow', diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py index da343d6c..0d05fc30 100644 --- a/skyflow/utils/_version.py +++ b/skyflow/utils/_version.py @@ -1 +1 @@ -SDK_VERSION = '1.15.8.dev0+a22b9c6' \ No newline at end of file +SDK_VERSION = '2.0.0' \ No newline at end of file From a0a064dde0989844b56f27ff55701c77b3cf3d5b Mon Sep 17 00:00:00 2001 From: skyflow-vivek Date: Thu, 13 Nov 2025 15:53:41 +0530 Subject: [PATCH 60/60] SK-2392 Add sample and update README for upload file --- README.md | 113 ++++++++++++++++++++++++++----- samples/vault_api/upload_file.py | 78 +++++++++++++++++++++ 2 files changed, 173 insertions(+), 18 deletions(-) create mode 100644 samples/vault_api/upload_file.py diff --git a/README.md b/README.md index 67b0d1c9..bb5d1c04 100644 --- a/README.md +++ b/README.md @@ -10,11 +10,11 @@ The Skyflow Python SDK is designed to help with integrating Skyflow into a Pytho - [Requirements](#requirements) - [Configuration](#configuration) - [Migration from v1 to v2](#migration-from-v1-to-v2) - - [Authentication options](#1-authentication-options) - - [Initializing the client](#2-initializing-the-client) - - [Request & response structure](#3-request--response-structure) - - [Request options](#4-request-options) - - [Error structure](#5-error-structure) + - [Authentication options](#authentication-options) + - [Initializing the client](#initializing-the-client) + - [Request & response structure](#request--response-structure) + - [Request options](#request-options) + - [Error structure](#error-structure) - [Quickstart](#quickstart) - [Authenticate](#authenticate) - [Initialize the client](#initialize-the-client) @@ -31,6 +31,7 @@ The Skyflow Python SDK is designed to help with integrating Skyflow into a Pytho - [Update](#update) - [Delete](#delete) - [Query](#query) + - [Upload File](#upload-file) - [Detect](#detect) - [Deidentify Text](#deidentify-text) - [Reidentify Text](#reidentify-text) @@ -771,7 +772,7 @@ Notes: - `redaction_type` defaults to `RedactionType.PLAIN_TEXT`. - `continue_on_error` default valus is `False`. 
-#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/detokenize_records.py) of a detokenize call +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/detokenize_records.py) of a detokenize call ```python from skyflow.error import SkyflowError @@ -948,7 +949,7 @@ except Exception as error: print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/tokenize_records.py) of Tokenize call +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/tokenize_records.py) of Tokenize call ```python from skyflow.error import SkyflowError @@ -1086,7 +1087,7 @@ except Exception as error: Retrieve specific records using skyflow `ids`. Ideal for fetching exact records when IDs are known. -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/get_records.py) of a get call to retrieve data using Redaction type: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/get_records.py) of a get call to retrieve data using Redaction type: ```python from skyflow.error import SkyflowError @@ -1163,7 +1164,7 @@ GetResponse( #### Get tokens Return tokens for records. Ideal for securely processing sensitive data while maintaining data privacy. -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/get_records.py) of get call to retrieve tokens using Skyflow IDs: +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/get_records.py) of get call to retrieve tokens using Skyflow IDs: ```python @@ -1386,7 +1387,7 @@ except Exception as error: print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/update_record.py) of update call +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/update_record.py) of update call ```python from skyflow.error import SkyflowError @@ -1513,7 +1514,7 @@ except Exception as error: print('Unexpected Error:', error) # Print the exception stack trace for debugging purposes ``` -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/delete_records.py) of delete call +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/delete_records.py) of delete call ```python from skyflow.error import SkyflowError @@ -1613,7 +1614,7 @@ except Exception as error: # Handle any unexpected errors during execution print('Unexpected Error:', error) # Print the stack trace for debugging purposes ``` -#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/query_records.py) of query call +#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/query_records.py) of query call ```python from skyflow.error import SkyflowError @@ -1673,6 +1674,82 @@ QueryResponse( ) ``` +### Upload File +To upload files to a Skyflow vault, use the `upload_file` method. The `FileUploadRequest` class accepts parameters such as the table name, column name, skyflow ID, and either a file path, a file object or a base64 encoded file. 
+
+#### Construct an upload file request
+
+You can upload a file by providing either a file path or a file object:
+
+```python
+from skyflow.error import SkyflowError
+from skyflow.vault.data import FileUploadRequest
+
+try:
+    with open('', 'rb') as file_obj:
+        file_upload_request = FileUploadRequest(
+            table='', # Table to upload file to
+            column_name='', # Column to upload file into
+            file_object=file_obj, # File object opened in binary mode
+            skyflow_id='' # Record ID to associate the file with
+        )
+        response = skyflow_client.vault('').upload_file(file_upload_request)
+        print('File upload successful:', response)
+
+except SkyflowError as error:
+    print('Skyflow Specific Error:', {
+        'code': error.http_code,
+        'message': error.message,
+        'details': error.details
+    })
+except Exception as error:
+    print('Unexpected Error:', error)
+```
+
+#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/upload_file.py) of upload file call
+
+```python
+from skyflow.error import SkyflowError
+from skyflow.vault.data import FileUploadRequest
+
+"""
+This example demonstrates how to upload a file to a Skyflow vault using a file object.
+
+1. Initializes the Skyflow client with the Vault ID.
+2. Constructs an upload file request.
+3. Uploads the file into the Skyflow vault.
+4. Prints the response to confirm the success or failure of the upload file operation.
+"""
+
+try:
+    # Initialize Skyflow client
+    # Step 1: Open a file
+    with open('my_document.pdf', 'rb') as file_obj:
+        # Step 2: Create a FileUploadRequest to define the upload file operation
+        file_upload_request = FileUploadRequest(
+            table='documents',
+            column_name='attachment',
+            file_object=file_obj,
+            skyflow_id='123e4567-e89b-12d3-a456-426614174000'
+        )
+
+        # Step 3: Execute the upload file request on the specified Skyflow vault
+        response = skyflow_client.vault('9f27764a10f7946fe56b3258e117').upload_file(file_upload_request)
+
+        # Step 4: Print the response containing the file upload result
+        print('File upload successful:', response)
+
+except SkyflowError as error:
+    # Step 5: Handle any exceptions that occur during the file upload
+    print('Skyflow Specific Error:', {
+        'code': error.http_code,
+        'message': error.message,
+        'details': error.details
+    })
+except Exception as error:
+    print('Unexpected Error:', error) # Print the stack trace for debugging purposes
+```
+
 ## Detect
 Skyflow Detect enables you to deidentify and reidentify sensitive data in text and files, supporting advanced privacy-preserving workflows. The Detect API supports the following operations:
@@ -2254,7 +2331,7 @@ except Exception as error:
 **path_params, query_params, header, body** are the JSON objects represented as dictionaries that will be sent through the connection integration url.
-#### An [example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/vault_api/invoke_connection.py) of Invoke Connection
+#### An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/vault_api/invoke_connection.py) of Invoke Connection
 
 ```python
 from skyflow import Skyflow, LogLevel
@@ -2364,7 +2441,7 @@ The [Service Account](https://github.com/skyflowapi/skyflow-python/tree/v2/skyfl
 
 The `generate_bearer_token(filepath)` function takes the credentials file path for token generation, alternatively, you can also send the entire credentials as string, by using `generate_bearer_token_from_creds(credentials)`
 
-#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/token_generation_example.py):
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/service_account/token_generation_example.py):
 
 ```python
 import json
@@ -2439,7 +2516,7 @@ except Exception as e:
 
 A service account with the context_id identifier generates bearer tokens containing context information, represented as a JWT claim in a Skyflow-generated bearer token. Tokens generated from such service accounts include a context_identifier claim, are valid for 60 minutes, and can be used to make API calls to the Data and Management APIs, depending on the service account's permissions.
 
-#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/token_generation_with_context_example.py):
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/service_account/token_generation_with_context_example.py):
 ```python
 import json
 from skyflow.error import SkyflowError
@@ -2519,7 +2596,7 @@ except Exception as e:
 #### Generate scoped bearer tokens
 A service account with multiple roles can generate bearer tokens with access limited to a specific role by specifying the appropriate roleID. This can be used to limit access to specific roles for services with multiple responsibilities, such as segregating access for billing and analytics. The generated bearer tokens are valid for 60 minutes and can only execute operations permitted by the permissions associated with the designated role.
 
-#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/scoped_token_generation_example.py):
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/service_account/scoped_token_generation_example.py):
 ```python
 import json
 from skyflow.error import SkyflowError
@@ -2565,7 +2642,7 @@ except Exception as e:
 #### Generate signed data tokens
 Skyflow generates data tokens when sensitive data is inserted into the vault. These data tokens can be digitally signed with a service account's private key, adding an extra layer of protection. Signed tokens can only be detokenized by providing the signed data token along with a bearer token generated from the service account's credentials. The service account must have the necessary permissions and context to successfully detokenize the signed data tokens.
 
-#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/signed_token_generation_example.py):
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/service_account/signed_token_generation_example.py):
 ```python
 import json
 from skyflow.error import SkyflowError
@@ -2648,7 +2725,7 @@ message: Authentication failed. Bearer token is expired. Use a valid bearer toke
 If you encounter this kind of error, retry the request.
 During the retry, the SDK detects that the previous bearer token has expired and generates a new one for the current and subsequent requests.
 
-#### [Example](https://github.com/skyflowapi/skyflow-python/blob/v2/samples/service_account/bearer_token_expiry_example.py):
+#### [Example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/service_account/bearer_token_expiry_example.py):
 ```python
 import json
 from skyflow.error import SkyflowError
diff --git a/samples/vault_api/upload_file.py b/samples/vault_api/upload_file.py
new file mode 100644
index 00000000..df3e8cd0
--- /dev/null
+++ b/samples/vault_api/upload_file.py
@@ -0,0 +1,78 @@
+import json
+from skyflow.error import SkyflowError
+from skyflow import Env
+from skyflow import Skyflow, LogLevel
+from skyflow.vault.data import FileUploadRequest
+
+"""
+ * Skyflow File Upload Example
+ *
+ * This example demonstrates how to:
+ * 1. Configure Skyflow client credentials
+ * 2. Set up vault configuration
+ * 3. Create a file upload request
+ * 4. Handle response and errors
+"""
+
+def perform_file_upload():
+    try:
+        # Step 1: Configure Credentials
+        cred = {
+            'clientID': '',
+            'clientName': '',
+            'tokenURI': '',
+            'keyID': '',
+            'privateKey': '',
+        }
+
+        skyflow_credentials = {
+            'credentials_string': json.dumps(cred)
+        }
+
+        credentials = {
+            'token': ''
+        }
+
+        # Step 2: Configure Vault
+        primary_vault_config = {
+            'vault_id': '',
+            'cluster_id': '',
+            'env': Env.PROD,
+            'credentials': credentials
+        }
+
+        # Step 3: Configure & Initialize Skyflow Client
+        skyflow_client = (
+            Skyflow.builder()
+            .add_vault_config(primary_vault_config)
+            .add_skyflow_credentials(skyflow_credentials)
+            .set_log_level(LogLevel.ERROR)
+            .build()
+        )
+
+        # Step 4: Prepare File Upload Data
+        with open('', 'rb') as file_obj:
+            file_upload_request = FileUploadRequest(
+                table='',              # Table to upload file to
+                column_name='',        # Column to upload file into
+                file_object=file_obj,  # Pass file object
+                skyflow_id=''          # Record ID to associate the file with
+            )
+
+            # Step 5: Perform File Upload
+            response = skyflow_client.vault('').upload_file(file_upload_request)
+
+            # Handle Successful Response
+            print('File upload successful: ', response)
+
+    except SkyflowError as error:
+        print('Skyflow Specific Error: ', {
+            'code': error.http_code,
+            'message': error.message,
+            'details': error.details
+        })
+    except Exception as error:
+        print('Unexpected Error:', error)
+
+# Invoke the file upload function
+perform_file_upload()