diff --git a/.github/workflows/beta-release.yml b/.github/workflows/beta-release.yml new file mode 100644 index 00000000..8fab88eb --- /dev/null +++ b/.github/workflows/beta-release.yml @@ -0,0 +1,18 @@ +name: Public Beta Release + +on: + push: + tags: '*.*.*b*' + paths-ignore: + - "setup.py" + - "*.yml" + - "*.md" + - "skyflow/utils/_version.py" + +jobs: + build-and-deploy: + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: main + is-internal: false + secrets: inherit diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 47a7281e..e1a04a3c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,46 +16,9 @@ jobs: checkAllCommitMessages: 'true' accessToken: ${{ secrets.PAT_ACTIONS }} error: 'One of your your commit messages is not matching the format with JIRA ID Ex: ( SDK-123 commit message )' - Test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.7' - - - name: create-json - id: create-json - uses: jsdaniell/create-json@1.1.2 - with: - name: "credentials.json" - json: ${{ secrets.CREDENTIALS_FILE_STR }} - - - name: 'Setup .env' - run: | - touch .env - echo VAULT_ID=${{secrets.VAULT_ID}} >> .env - echo VAULT_URL=${{secrets.VAULT_URL}} >> .env - echo DETOKENIZE_TEST_TOKEN=${{secrets.DETOKENIZE_TEST_TOKEN}} >> .env - echo DETOKENIZE_TEST_VALUE=${{secrets.DETOKENIZE_TEST_VALUE}} >> .env - echo CREDENTIALS_FILE_PATH=./credentials.json >> .env - echo CVV_GEN_CONNECTION_URL=${{secrets.CVV_GEN_CONNECTION_URL}} >> .env - echo VISA_CONNECTION_BASIC_AUTH=${{secrets.VISA_CONNECTION_BASIC_AUTH}} >> .env - echo SKYFLOW_ID1=${{secrets.SKYFLOW_ID1}} >> .env - echo SKYFLOW_ID2=${{secrets.SKYFLOW_ID2}} >> .env - echo SKYFLOW_ID3=${{secrets.SKYFLOW_ID3}} >> .env - - name: 'Run Tests' - run: | - python -m pip install --upgrade pip - pip install requests pyjwt datetime aiohttp cryptography python-dotenv coverage - 
coverage run --source skyflow -m unittest discover - - name: coverage - run: coverage xml -o test-coverage.xml - - name: Codecov - uses: codecov/codecov-action@v2.1.0 - with: - token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} - files: test-coverage.xml - name: codecov-skyflow-python - verbose: true + test: + uses: ./.github/workflows/shared-tests.yml + with: + python-version: '3.8' + secrets: inherit diff --git a/.github/workflows/internal-release.yml b/.github/workflows/internal-release.yml new file mode 100644 index 00000000..d4ad9400 --- /dev/null +++ b/.github/workflows/internal-release.yml @@ -0,0 +1,23 @@ +name: Internal Release + +on: + push: + tags-ignore: + - '*.*' + paths-ignore: + - "setup.py" + - "*.yml" + - "*.md" + - "skyflow/utils/_version.py" + - "samples/**" + branches: + - release/* + +jobs: + build-and-deploy: + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: ${{ github.ref_name }} + is-internal: true + secrets: inherit + \ No newline at end of file diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 68d9b0a8..e0a689de 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,46 +6,7 @@ on: - main jobs: - Test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.7' - - - name: create-json - id: create-json - uses: jsdaniell/create-json@1.1.2 - with: - name: "credentials.json" - json: ${{ secrets.CREDENTIALS_FILE_STR }} - - - name: 'Setup .env' - run: | - touch .env - echo VAULT_ID=${{secrets.VAULT_ID}} >> .env - echo VAULT_URL=${{secrets.VAULT_URL}} >> .env - echo DETOKENIZE_TEST_TOKEN=${{secrets.DETOKENIZE_TEST_TOKEN}} >> .env - echo DETOKENIZE_TEST_VALUE=${{secrets.DETOKENIZE_TEST_VALUE}} >> .env - echo CREDENTIALS_FILE_PATH=./credentials.json >> .env - echo CVV_GEN_CONNECTION_URL=${{secrets.CVV_GEN_CONNECTION_URL}} >> .env - echo VISA_CONNECTION_BASIC_AUTH=${{secrets.VISA_CONNECTION_BASIC_AUTH}} >> .env - 
echo SKYFLOW_ID1=${{secrets.SKYFLOW_ID1}} >> .env - echo SKYFLOW_ID2=${{secrets.SKYFLOW_ID2}} >> .env - echo SKYFLOW_ID3=${{secrets.SKYFLOW_ID3}} >> .env - - - name: 'Run Tests' - run: | - python -m pip install --upgrade pip - pip install requests pyjwt datetime aiohttp cryptography python-dotenv coverage - coverage run --source skyflow -m unittest discover - - name: coverage - run: coverage xml -o test-coverage.xml - - name: Codecov - uses: codecov/codecov-action@v2.1.0 - with: - token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} - files: test-coverage.xml - name: codecov-skyflow-python - verbose: true \ No newline at end of file + test: + uses: ./.github/workflows/shared-tests.yml + with: + python-version: '3.7' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5fcd5dbe..53894cd6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,47 +7,12 @@ on: - "setup.py" - "*.yml" - "*.md" - - "skyflow/version.py" + - "skyflow/utils/_version.py" jobs: build-and-deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - token: ${{ secrets.PAT_ACTIONS }} - ref: main - fetch-depth: 0 - - uses: actions/setup-python@v2 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine - - - name: Get Previous tag - id: previoustag - uses: WyriHaximus/github-action-get-previous-tag@v1 - with: - fallback: 1.0.0 - - - name: Bump Version - run: | - chmod +x ./ci-scripts/bump_version.sh - ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" - - - name: Commit changes - run: | - git config user.name ${{ github.actor }} - git config user.email ${{ github.actor }}@users.noreply.github.com - git add setup.py - git add skyflow/version.py - git commit -m "[AUTOMATED] Public Release - ${{ steps.previoustag.outputs.tag }}" - git push origin - - - name: Build and publish - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ 
secrets.PYPI_PUBLISH_TOKEN }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* + uses: ./.github/workflows/shared-build-and-deploy.yml + with: + ref: main + is-internal: false + secrets: inherit diff --git a/.github/workflows/shared-build-and-deploy.yml b/.github/workflows/shared-build-and-deploy.yml new file mode 100644 index 00000000..9d349fbb --- /dev/null +++ b/.github/workflows/shared-build-and-deploy.yml @@ -0,0 +1,84 @@ +name: Shared Build and Deploy + +on: + workflow_call: + inputs: + ref: + description: 'Git reference to use (e.g., main or branch name)' + required: true + type: string + + is-internal: + description: 'Flag for internal release' + required: true + type: boolean + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ inputs.ref }} + fetch-depth: 0 + + - uses: actions/setup-python@v2 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + + - name: Get Previous tag + id: previoustag + uses: WyriHaximus/github-action-get-previous-tag@v1 + with: + fallback: 1.0.0 + + - name: Bump Version + run: | + chmod +x ./ci-scripts/bump_version.sh + if ${{ inputs.is-internal }}; then + ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" "$(git rev-parse --short "$GITHUB_SHA")" + else + ./ci-scripts/bump_version.sh "${{ steps.previoustag.outputs.tag }}" + fi + + - name: Commit changes + run: | + git config user.name "${{ github.actor }}" + git config user.email "${{ github.actor }}@users.noreply.github.com" + git add setup.py + git add skyflow/utils/_version.py + + if [ "${{ inputs.is-internal }}" = "true" ]; then + VERSION="${{ steps.previoustag.outputs.tag }}.dev0+$(git rev-parse --short $GITHUB_SHA)" + COMMIT_MESSAGE="[AUTOMATED] Private Release $VERSION" + git commit -m "$COMMIT_MESSAGE" + git push origin ${{ github.ref_name }} -f + else + COMMIT_MESSAGE="[AUTOMATED] Public Release - ${{ 
steps.previoustag.outputs.tag }}" + git commit -m "$COMMIT_MESSAGE" + git push origin + fi + + - name: Build and Publish Package + if: ${{ !inputs.is-internal }} + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + + - name: Build and Publish to JFrog Artifactory + if: ${{ inputs.is-internal }} + env: + TWINE_USERNAME: ${{ secrets.JFROG_USERNAME }} + TWINE_PASSWORD: ${{ secrets.JFROG_PASSWORD }} + run: | + python setup.py sdist bdist_wheel + twine upload --repository-url https://prekarilabs.jfrog.io/artifactory/api/pypi/skyflow-python/ dist/* + + + \ No newline at end of file diff --git a/.github/workflows/shared-tests.yml b/.github/workflows/shared-tests.yml new file mode 100644 index 00000000..2c54d2b7 --- /dev/null +++ b/.github/workflows/shared-tests.yml @@ -0,0 +1,41 @@ +name: Shared Test Steps + +on: + workflow_call: + inputs: + python-version: + description: 'Python version to use' + required: true + type: string + +jobs: + run-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ inputs.python-version }} + + - name: create-json + id: create-json + uses: jsdaniell/create-json@1.1.2 + with: + name: "credentials.json" + json: ${{ secrets.VALID_SKYFLOW_CREDS_TEST }} + + - name: 'Run Tests' + run: | + pip install -r requirements.txt + python -m coverage run --source=skyflow --omit=skyflow/generated/*,skyflow/utils/validations/*,skyflow/vault/data/*,skyflow/vault/tokens/*,skyflow/vault/connection/*,skyflow/error/*,skyflow/utils/enums/*,skyflow/vault/controller/_audit.py,skyflow/vault/controller/_bin_look_up.py -m unittest discover + + - name: coverage + run: coverage xml -o test-coverage.xml + + - name: Codecov + uses: codecov/codecov-action@v2.1.0 + with: + token: ${{ secrets.CODECOV_REPO_UPLOAD_TOKEN }} + files: test-coverage.xml + name: codecov-skyflow-python + verbose: true diff 
--git a/README.md b/README.md index b64a0bc7..bfdab7a3 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,13 @@ # Skyflow-python --- -## Description -This Python SDK is designed to help developers easily implement Skyflow into their python backend. +## Description +This Python SDK is designed to help developers easily implement Skyflow into their python backend. ## Table of Contents + - [Skyflow-python](#skyflow-python) - [Description](#description) - [Table of Contents](#table-of-contents) @@ -18,6 +19,7 @@ This Python SDK is designed to help developers easily implement Skyflow into the - [Vault APIs](#vault-apis) - [Insert data into the vault](#insert-data-into-the-vault) - [Detokenize](#detokenize) + - [Tokenize](#tokenize) - [Get](#get) - [Get By Id](#get-by-id) - [Redaction Types](#redaction-types) @@ -28,7 +30,6 @@ This Python SDK is designed to help developers easily implement Skyflow into the - [Logging](#logging) - [Reporting a Vulnerability](#reporting-a-vulnerability) - ## Features Authentication with a Skyflow Service Account and generation of a bearer token @@ -41,7 +42,7 @@ Invoking connections to call downstream third party APIs without directly handli ### Requirements -- Python 3.7.0 and above +- Python 3.8.0 and above ### Configuration @@ -57,545 +58,734 @@ The [Service Account](https://github.com/skyflowapi/skyflow-python/tree/main/sky The `generate_bearer_token(filepath)` function takes the credentials file path for token generation, alternatively, you can also send the entire credentials as string, by using `generate_bearer_token_from_creds(credentials)` -[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/main/samples/sa_token_sample.py): +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): ```python -from skyflow.errors import SkyflowError +from skyflow.error import SkyflowError from skyflow.service_account import 
generate_bearer_token, is_expired # cache token for reuse -bearerToken = '' -tokenType = '' +bearer_token = '' +token_type = '' +def token_provider(): + global bearer_token + global token_type + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('') + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_example.py): + +```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' def token_provider(): - global bearerToken - global tokenType - - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token('') - return bearerToken, tokenType + global bearer_token + global token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(credentials_string) + return bearer_token, token_type try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) except SkyflowError as e: print(e) ``` +## Service Account Scoped Token Generation + +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): + +```python +from skyflow.error import SkyflowError +from
skyflow.service_account import generate_bearer_token, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] +} +def token_provider(): + global bearer_token + global token_type -[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/main/samples/generate_bearer_token_from_creds_sample.py): + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/scoped_token_generation_example.py): ```python -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token_from_creds, is_expired +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired # cache token for reuse -bearerToken = '' -tokenType = '' +bearer_token = '' +token_type = '' +options = { + 'role_ids': ['ROLE_ID1', 'ROLE_ID2'] +} def token_provider(): - global bearerToken - global tokenType + global bearer_token + global token_type # As an example - credentials = { - "clientID": "", - "clientName": "", - "keyID": "", - "tokenURI": '', - "privateKey": "" + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', } - jsonString = json.dumps(credentials) - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token_from_creds( - credentials=jsonString) - return bearerToken, tokenType + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = 
generate_bearer_token_from_creds(skyflow_credentials_string, options) + return bearer_token, token_type try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) except SkyflowError as e: print(e) ``` -## Vault APIs +## Service Account Token Generation With Context -The [Vault](https://github.com/skyflowapi/skyflow-python/tree/main/skyflow/vault) python module is used to perform operations on the vault such as inserting records, detokenizing tokens, retrieving tokens for a skyflow_id and to invoke a connection. +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): -To use this module, the skyflow client must first be initialized as follows. +```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': "" +} +def token_provider(): + global bearer_token + global token_type + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/token_generation_with_context_example.py): + +```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': '' +} +def token_provider(): + global bearer_token + global 
token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string, options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +## Service Account Signed Token Generation + +[Example using filepath](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): ```python -from skyflow.vault import Client, Configuration +from skyflow.error import SkyflowError from skyflow.service_account import generate_bearer_token, is_expired -# cache for reuse -bearerToken = '' +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': 'CONTEX_ID', + 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90 # in seconds +} +def token_provider(): + global bearer_token + global token_type -# User defined function to provide access token to the vault apis + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('', options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) + +``` + +[Example using credentials string](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/service_account/signed_token_generation_example.py): + +```python +from skyflow.error import SkyflowError +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired + +# cache token for reuse +bearer_token = '' +token_type = '' +options = { + 'ctx': 'CONTEX_ID', + 
'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90 # in seconds +} def token_provider(): - global bearerToken - if !(is_expired(bearerToken)): - return bearerToken - bearerToken, _ = generate_bearer_token('') - return bearerToken + global bearer_token + global token_type + # As an example + skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } + credentials_string = json.dumps(skyflow_credentials) + + if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(skyflow_credentials_string, options) + return bearer_token, token_type + +try: + bearer_token, token_type = token_provider() + print('Access Token:', bearer_token) + print('Type of token:', token_type) +except SkyflowError as e: + print(e) +``` + +## Vault APIs + +The vault python module is used to perform operations on the vault such as inserting records, detokenizing tokens, retrieving tokens for a skyflow_id and to invoke a connection. + +To use this module, the skyflow client must first be initialized as follows. + +```python +from skyflow import Env +from skyflow import Skyflow, LogLevel + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } +credentials_string = json.dumps(skyflow_credentials) + +# Pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: "API_KEY", # API_KEY + # path: "PATH", # path to credentials file + # credentials_string: credentials_string, # credentials as string +} -#Initializing a Skyflow Client instance with a SkyflowConfiguration object -config = Configuration('', '', token_provider) -client = Client(config) +client = ( + Skyflow.builder() + .add_vault_config({ + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials # individual credentials + }) + .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) ``` +Notes: + +- If both Skyflow common credentials and individual credentials at the configuration level are provided, the individual credentials at the configuration level will take priority. + All Vault APIs must be invoked using a client instance. ### Insert data into the vault -To insert data into your vault use the `insert(records: dict, options: InsertOptions)` method. The `records` parameter is a dictionary that requires a `records` key and takes an array of records to insert into the vault. The `options` parameter takes a dictionary of optional parameters for the insertion. This includes an option to return tokenized data, upsert records and continue on error. +To insert data into your vault, use the `insert` method. 
The `InsertRequest` class is used to create an insert request, which contains the values to be inserted in the form of a dictionary of records. Additionally, you can provide options in the insert request, such as returning tokenized data, upserting records, and continuing on error. + +Insert call schema ```python -# Optional, indicates whether you return tokens for inserted data. Defaults to 'true'. -tokens: bool -# Optional, indicates Upsert support in the vault. -upsert: [UpsertOption] -# Optional, decides whether to continue if error encountered or not -continueOnError: bool +#Initialize Client +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest + +try: + insert_data = [ + {'': ''}, + {'': ''} + ] + + + insert_request = InsertRequest( + table_name = '', + values = insert_data, + ) + + response = skyflow_client.vault('VAULT_ID').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` -Insert call schema +**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/insert_records.py)** + ```python -from skyflow.vault import InsertOptions, UpsertOption -from skyflow.errors import SkyflowError +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest -#Initialize Client try: - # Create an Upsert option. 
- upsertOption = UpsertOption(table="",column="") - options = InsertOptions(tokens=True, upsert=[upsertOption], continueOnError=False) + insert_data = [ + {'card_number': '4111111111111111'}, + ] - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True # returns tokens + ) + + response = client.vault('').insert(insert_request) print("Response:", response) except SkyflowError as e: print("Error Occurred:", e) + ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_sample.py)** +Skyflow returns tokens for the record you just inserted. ```python -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "cardNumber": "41111111111", - "cvv": "123", - }, + 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', + 'card_number': '5479-4229-4622-1393' } - ] - }, - InsertOptions(True), + ], + errors=[] ) ``` -Skyflow returns tokens for the record you just inserted. 
+**Insert call example with `continue_on_error` option** -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "cardNumber": "f3907186-e7e2-466f-91e5-48e12c2bcbc1", - "cvv": "1989cb56-63da-4482-a2df-1f74cd0dd1a5", - "skyflow_id": "d863633c-8c75-44fc-b2ed-2b58162d1117" - }, - "request_index": 0 - } +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest + +try: + insert_data = [ + {'card_number': '4111111111111111'}, + {'card_numbe': '4111111111111111'}, # Intentional typo card_numbe ] -} + + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True, # returns tokens + continue_on_error = True + ) + + response = client.vault('').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) + ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_with_continue_on_error_sample.py) with `continueOnError` option** +Sample Response ```python -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "card_number": "4111111111111111", - "full_name": "john doe" - } - }, - { - "table": "pii_field", - "fields": { - "card_number": "4242424242424200" - "full_name": "jane doe" - } + 'skyflow_id': '89c125d1-3bec-4360-b701-a032dda16500', + 'request_index': 0, + 'card_number': '5479-4229-4622-1393' } + ], + errors= + [ + { + 'request_index': 1, + 'error': 'Insert failed. Column card_numbe is invalid. Specify a valid column.' 
+ } ] - }, InsertOptions(tokens=True, continueOnError=True) ) + ``` -Sample Response +**Insert call example with `upsert` options** -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "card_number": "f37186-e7e2-466f-91e5-48e2bcbc1", - "full_name": "1989cb56-63a-4482-adf-1f74cd1a5", - "skyflow_id": "3daf1a7f-bc7f-4fc9-8c56-a6e4e93231e6" - }, - "request_index": 0 - } - ], - "errors": [ - { - "error": { - "code": 404, - "description": "Object Name pii_field was not found for Vault - requestId : af4aad11-f276-474d-b626-c75c8b35d49e", - "request_index": 1 - } - } - ] -} +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import InsertRequest +try: + insert_data = [ + {"name": 'sample name'}, + ] + + insert_request = InsertRequest( + table_name = 'table1', + values = insert_data, + return_tokens = True, # returns tokens + upsert = "name" # unique column name + ) + + response = client.vault('').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` -**Insert call [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/insert_upsert_sample.py) with `upsert` options** +Skyflow returns tokens, with `upsert` support, for the record you just inserted. ```python -upsertOption = UpsertOption(table="cards",column="cardNumber") -client.insert( - { - "records": [ +InsertResponse( + inserted_fields= + [ { - "table": "cards", - "fields": { - "cardNumber": "41111111111", - "cvv": "123", - }, + 'skyflow_id': 'a8f3ed5d-55eb-4f32-bf7e-2dbf4b9d9097', + 'name': '3f27b3d7-6bf0-432a-acf9-789c0470e2da' } - ] - }, - InsertOptions(tokens=True,upsert=[upsertOption]), + ], + errors=[] ) ``` -Skyflow returns tokens, with `upsert` support, for the record you just inserted. 
- -```json -{ - "records": [ - { - "table": "cards", - "fields": { - "cardNumber": "f3907186-e7e2-466f-91e5-48e12c2bcbc1", - "cvv": "1989cb56-63da-4482-a2df-1f74cd0dd1a5", - "skyflow_id": "60b32788-12ec-4dd7-9da5-0146c3afbe11" - }, - "request_index": 0 - } - ] -} -``` - ### Detokenize -To retrieve tokens from your vault, you can use the `Detokenize(records: dict, options: DetokenizeOptions)` method.The records parameter takes a dictionary that contains the `records` key that takes an array of records to return. The options parameter is a `DetokenizeOptions` object that provides further options, including `continueOnError` operation, for your detokenize call, as shown below: +To retrieve tokens from your vault, you can use the `detokenize` method. The `DetokenizeRequest` class requires a list of detokenization data to be provided as input. Additionally, the redaction type and continue on error are optional parameters. ```python -{ - "records":[ - { - "token": str , # Token for the record to fetch - "redaction": Skyflow.RedactionType # Optional. Redaction to apply for retrieved data. E.g. RedactionType.MASKED - } - ] -} +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +try: + detokenize_data = ['', '', ''] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = False, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional + ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) + ``` + Notes: -- `redaction` defaults to [RedactionType.PLAIN_TEXT](#redaction-types). -- `continueOnError` in DetokenizeOptions will default to `True`. -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/detokenize_sample.py) of a detokenize call: +- `redaction_type` defaults to `RedactionType.PLAIN_TEXT`. 
+- `continue_on_error` default value is `False`. + +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/detokenize_records.py) of a detokenize call: ```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + try: - client.detokenize( - { - "records": [ - { - "token": "45012507-f72b-4f5c-9bf9-86b133bae719" - }, - { - "token": '1r434532-6f76-4319-bdd3-96281e051051', - "redaction": Skyflow.RedactionType.MASKED - }, - { - "token": "invalid-token" - } - ] - } + detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-3840'] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = False, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "token": "131e70dc-6f76-4319-bdd3-96281e051051", - "value": "1990-01-01" - }, - { - "token": "1r434532-6f76-4319-bdd3-96281e051051", - "value": "xxxxxxer", - } - ], - "errors": [ - { - "token": "invalid-token", - "error": { - "code": 404, - "description": "Tokens not found for invalid-token" - } - } - ] -} +DetokenizeResponse( + detokenized_fields=[ + {'token': '9738-1683-0486-1480', 'value': '4111111111111115', 'type': 'STRING'}, + {'token': '6184-6357-8409-6668', 'value': '4111111111111119', 'type': 'STRING'}, + {'token': '4914-9088-2814-3840', 'value': '4111111111111118', 'type': 'STRING'} + ], + errors=[] +) ``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/detokenize_with_continue_on_error_sample.py) of a detokenize call with continueOnError: +An example of a detokenize call with continue_on_error: ```python +from
skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + try: - client.detokenize( - { - "records": [ - { - "token": "45012507-f72b-4f5c-9bf9-86b133bae719" - }, - { - "token": '1r434532-6f76-4319-bdd3-96281e051051', - "redaction": Skyflow.RedactionType.MASKED - } - ] - }, DetokenizeOptions(continueOnError=False) + detokenize_data = ['9738-1683-0486-1480', '6184-6357-8409-6668', '4914-9088-2814-384'] + + detokenize_request = DetokenizeRequest( + tokens = detokenize_data, + continue_on_error = True, # optional + redaction_type = RedactionType.PLAIN_TEXT # optional ) + + response = skyflow_client.vault('').detokenize(detokenize_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "token": "131e70dc-6f76-4319-bdd3-96281e051051", - "value": "1990-01-01" - }, - { - "token": "1r434532-6f76-4319-bdd3-96281e051051", - "value": "xxxxxxer", - } - ] -} -``` - -### Get +DetokenizeResponse( + detokenized_fields=[ + { + 'token': '9738-1683-0486-1480', + 'value': '4111111111111115', + 'type': 'STRING' + }, + { + 'token': '6184-6357-8409-6668', + 'value': '4111111111111119', + 'type': 'STRING' + } + ], + errors=[ + { + 'token': '4914-9088-2814-384', + 'error': 'Token Not Found' + } + ] +) -To retrieve data using Skyflow IDs or unique column values, use the `get(records: dict,options: GetOptions)` method. The `records` parameter takes a Dictionary that contains either an array of Skyflow IDs or a unique column name and values.The second parameter options is a GetOptions object that retrieves tokens of Skyflow IDs. +``` -Note: +### Tokenize - - You can use either Skyflow IDs or `unique` values to retrieve records. You can't use both at the same time. - - GetOptions parameter applicable only for retrieving tokens using Skyflow ID. 
- - You can't pass GetOptions along with the redaction type. - - `tokens` defaults to false. +To tokenize data, use the `tokenize` method. The `TokenizeRequest` class is utilized to create a tokenize request. In this request, you specify the `values` parameter, which is a list of dictionaries. Each dictionary contains two keys: `value` and `column_group`. ```python -{ - 'records': [ - { - 'columnName': str, # Name of the unique column. - 'columnValues': [str], # List of unique column values. - 'table': str, # Name of table holding the data. - 'redaction': Skyflow.RedactionType, # Redaction applied to retrieved data. - } - ] -} - or -{ - 'records': [ - { - 'ids': [str], # List of Skyflow IDs. - 'table': str, # Name of table holding the data. - 'redaction': Skyflow.RedactionType, # Redaction applied to retrieved data. - } - ] -} +from skyflow.vault.tokens import TokenizeRequest +tokenize_request = TokenizeRequest( + values = [{ + 'value': '', + 'column_group': '' + }] +) ``` + Sample usage -The following snippet shows how to use the `get()` method. 
For details, see [get_sample.py](https://github.com/skyflowapi/skyflow-python/blob/main/samples/get_sample.py), - -```python -from skyflow.vault import RedactionType - -skyflowIDs = ['f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9'] -record = {'ids': skyflowIDs, 'table': 'cards', 'redaction':RedactionType.PLAIN_TEXT} -recordsWithUniqueColumn = - { - 'table': 'test_table', - 'columnName': 'card_number', - 'columnValues': ['123456'], - 'redaction': RedactionType.PLAIN_TEXT - } - -invalidID = ['invalid skyflow ID'] -badRecord = {'ids': invalidID, 'table': 'cards', 'redaction': RedactionType.PLAIN_TEXT} - -records = {'records': [record, badRecord]} - +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/tokenize_records.py) of a tokenize call: + +```python +from skyflow.error import SkyflowError +from skyflow.vault.tokens import TokenizeRequest + try: - client.get(records) -except SkyflowError as e: - if e.data: - print(e.data) - else: - print(e) + tokenize_request = TokenizeRequest( + values = [{ + "value": '4111111111111111', + "column_group": "card_number_cg" + }] + ) + + response = client.vault('').tokenize(tokenize_request) + print(response) +except SkyflowError as e: + print('Error Occurred: ', e) ``` Sample response: ```python -{ - 'records': [ - { - 'fields': { - 'card_number': '4111111111111111', - 'cvv': '127', - 'expiry_date': '11/35', - 'fullname': 'monica', - 'skyflow_id': 'f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9' - }, - 'table': 'cards' - }, - { - 'fields': { - 'card_number': '123456', - 'cvv': '317', - 'expiry_date': '10/23', - 'fullname': 'sam', - 'skyflow_id': 'da26de53-95d5-4bdb-99db-8d8c66a35ff9' - }, - 'table': 'cards' - } - ], - 'errors': [ - { - 'error': { - 'code': '404', - 'description': 'No Records Found' - }, - 'skyflow_ids': ['invalid skyflow id'] - } - ] -} +TokenizeResponse( + tokenized_fields=[ + { + 'token': '5479-4229-4622-1393' + } + ] +) + ``` -The following snippet shows how to use the `get()` method with 
GetOptions. +### Get + +To retrieve data using Skyflow IDs or unique column values, use the `get` method. The `GetRequest` class is used to create a get request, where you specify parameters such as the table name, redaction type, Skyflow IDs, column names, column values, and return tokens. If Skyflow IDs are provided, column names and column values cannot be used. Similarly, if column names or column values are provided, Skyflow IDs cannot be used. ```python -from skyflow.vault import GetOptions +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest + +GetRequest( + table = '', + ids = ['SKYFLOW_ID1>', 'SKYFLOW_ID2>'], + return_tokens = True, + redaction_type = RedactionType.PLAIN_TEXT +) + +# or + +GetRequest( + table = '', + column_name ='', + column_values = ['COLUMN_VALUE1>', 'COLUMN_VALUE2>'], + redaction_type = RedactionType.PLAIN_TEXT +) +``` + +Sample usage + +### Get By Column Name and Column Values + +The following snippet shows how to use the `get` method using column names and column values. For details, see [get_column_values.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_column_values.py), + +```python +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest -{ - 'records': [ - { - 'ids': ['56513264-fc45-41fa-9cb0-d1ad3602bc49','da26de53-95d5-4bdb-99db-8d8c66a35ff9'], - 'table': 'cards', - } - ] -} - try: - client.get(records, GetOptions(True)) + column_values = [ + '123456' + ] + + get_request = GetRequest( + table = 'table1', + column_name = 'card_number', # It must be configured as unique in the schema. 
+ column_values = column_values, + redaction_type = RedactionType.PLAIN_TEXT + ) + + response = skyflow_client.vault('').get(get_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - 'records': [ - { - 'fields': { - 'card_number': '4555-5176-5936-1930', - 'cvv': '6ad5f708-2061-453e-9491-618a1f29a688', - 'skyflow_id': '56513264-fc45-41fa-9cb0-d1ad3602bc49' - }, - 'table': 'cards' - }, - { - 'fields': { - 'card_number': '8882-7418-2776-6660', - 'cvv': '25260679-e339-4b33-a5b0-c8b08df77af7', - 'skyflow_id': 'da26de53-95d5-4bdb-99db-8d8c66a35ff9' - }, - 'table': 'cards' - } - ], - 'errors': [] -} -``` +GetResponse( + data=[ + { + 'card_number': '123456', + 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968' + } + ], + errors=[] +) -### Get By Id +``` -For retrieving using SkyflowID's, use the get_by_id(records: dict) method. The records parameter takes a Dictionary that contains records to be fetched as shown below: +### Get By Skyflow Ids ```python -{ - "records": [ - { - "ids": [str], # List of SkyflowID's of the records to be fetched - "table": str, # name of table holding the above skyflow_id's - "redaction": Skyflow.RedactionType, # redaction to be applied to retrieved data - } - ] -} +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest + +GetRequest( + table = '', + ids = ['SKYFLOW_ID1>', 'SKYFLOW_ID2>'], + return_tokens = True, + redaction_type = RedactionType.PLAIN_TEXT +) ``` #### Redaction Types + There are 4 accepted values in Skyflow.RedactionTypes: - `PLAIN_TEXT` @@ -603,245 +793,244 @@ There are 4 accepted values in Skyflow.RedactionTypes: - `REDACTED` - `DEFAULT` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/get_by_ids_sample.py) of get_by_id call: +An 
[example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/get_records.py) of get by skyflow ids call: ```python -from skyflow.vault import RedactionType +from skyflow.error import SkyflowError +from skyflow.utils.enums import RedactionType +from skyflow.vault.data import GetRequest -skyflowIDs = [ - "f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9", - "da26de53-95d5-4bdb-99db-8d8c66a35ff9" -] -record = {"ids": skyflowIDs, "table": "cards", "redaction": RedactionType.PLAIN_TEXT} +try: + get_request = GetRequest( + table = 'table1', + ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], + redaction_type = RedactionType.PLAIN_TEXT + ) -invalidID = ["invalid skyflow ID"] -badRecord = {"ids": invalidID, "table": "cards", "redaction": RedactionType.PLAIN_TEXT} + response = client.vault('').get(get_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) +``` -records = {"records": [record, badRecord]} +Sample response: + +```python +GetResponse( + data=[ + { + 'card_number': '4555555555555553', + 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + }, + { + 'card_number': '4555555555555559', + 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + } + ], + errors=[] +) + +``` + +The following snippet shows how to use the `get()` method with return_tokens true. 
+ +```python +from skyflow.error import SkyflowError +from skyflow.vault.data import GetRequest try: - client.get_by_id(records) + get_request = GetRequest( + table = 'table1', + ids = ['aea64577-12b1-4682-aad5-a183194c3f3d', 'b385c565-86eb-4af2-b959-8376f9b0754b'], + return_tokens = True + ) + + response = client.vault('').get(get_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records": [ - { - "fields": { - "card_number": "4111111111111111", - "cvv": "127", - "expiry_date": "11/35", - "fullname": "myname", - "skyflow_id": "f8d8a622-b557-4c6b-a12c-c5ebe0b0bfd9" - }, - "table": "cards" - }, - { - "fields": { - "card_number": "4111111111111111", - "cvv": "317", - "expiry_date": "10/23", - "fullname": "sam", - "skyflow_id": "da26de53-95d5-4bdb-99db-8d8c66a35ff9" - }, - "table": "cards" - } - ], - "errors": [ - { - "error": { - "code": "404", - "description": "No Records Found" - }, - "skyflow_ids": ["invalid skyflow id"] - } - ] -} +GetResponse( + data=[ + { + 'card_number': '3562-0140-8820-7499', + 'skyflow_id': 'aea64577-12b1-4682-aad5-a183194c3f3d' + }, + { + 'card_number': '3998-2139-0328-0697', + 'skyflow_id': 'b385c565-86eb-4af2-b959-8376f9b0754b' + } + ], + errors=[] +) ``` -`Note:` While using detokenize and get_by_id methods, there is a possibility that some or all of the tokens might be invalid. In such cases, the data from response consists of both errors and detokenized records. In the SDK, this will raise a SkyflowError Exception and you can retrieve the data from this Exception object as shown above. - ### Update -To update data in your vault, use the `update(records: dict, options: UpdateOptions)` method. The `records` parameter takes a Dictionary that contains records to fetch. If `UpdateTokens` is `True`, Skyflow returns tokens for the record you just updated. 
If `UpdateOptions` is `False`, Skyflow returns IDs for the record you updated. +To update data in your vault, use the `update` method. The `UpdateRequest` class is used to create an update request, where you specify parameters such as the table name, data (as a dictionary), tokens, return_tokens, and token_strict. If `return_tokens` is set to True, Skyflow returns tokens for the updated records. If `return_tokens` is set to False, Skyflow returns IDs for the updated records. ```python -# Optional, indicates whether to return all fields for updated data. Defaults to 'true'. -options: UpdateOptions -``` +from skyflow.error import SkyflowError +from skyflow.vault.data import UpdateRequest -```python -{ - 'records': [ - { - 'id': str, # Skyflow ID of the record to be updated. - 'table': str, # Name of table holding the skyflowID. - 'fields': { - str: str # Name of the column and value to update. - } - } - ] -} +try: + update_data = { + 'skyflow_id': '', + '': '' + } + + update_request = UpdateRequest( + table='TABLE_NAME', + data=update_data + ) + + response = skyflow_client.vault('VAULT_ID').update(update_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) ``` + Sample usage -The following snippet shows how to use the `update()` method. For details, see [update_sample.py](https://github.com/skyflowapi/skyflow-python/blob/main/samples/update_sample.py), +The following snippet shows how to use the `update()` method. 
For details, see [update_record.py](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/update_record.py), ```python -records = { - 'records': [ - { - 'id': '56513264-fc45-41fa-9cb0-d1ad3602bc49', - 'table': 'cards', - 'fields': { - 'card_number': '45678910234' - } - } - ] - } +from skyflow.error import SkyflowError +from skyflow.vault.data import UpdateRequest + try: - client.update(records, UpdateOptions(True)) + update_data = { + 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', + 'card_number': '4111111111117777' + } + + update_request = UpdateRequest( + table = 'table1', + data = update_data + ) + + response = skyflow_client.vault('').update(update_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) - else: - print(e) + print('Error Occurred:', e) ``` Sample response -`UpdateOptions` set to `True` +`return_tokens` set to `True` ```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49', - 'fields':{ - 'card_number':'0051-6502-5704-9879' - } - } - ], - 'errors':[] -} -``` - -`UpdateOptions` set to `False` +UpdateResponse( + updated_field={ + 'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315', + 'card_number': '4131-1751-0217-8491' + }, + errors=[] +) -```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49' - } - ], - 'errors':[] -} ``` -Sample Error +`return_tokens` set to `False` ```python -{ - 'records':[ - { - 'id':'56513264-fc45-41fa-9cb0-d1ad3602bc49' - } - ], - 'errors':[ - { - 'error':{ - 'code':404, - 'description':'Token for skyflowID doesn"t exist in vault - Request ID: a8def196-9569-9cb7-9974-f899f9e4bd0a' - } - } - ] -} +UpdateResponse( + updated_field={'skyflow_id': '3b80c76a-c0d7-4c02-be00-b4128cb0f315'}, + errors=[] +) + ``` ### Delete -For deleting using SkyflowID's, use the delete(records: dict) method. 
The records parameter takes a Dictionary that contains records to be deleted as shown below: +To delete records using Skyflow IDs, use the `delete` method. The `DeleteRequest` class accepts a list of Skyflow IDs that you want to delete, as shown below: ```python -{ - "records": [ - { - "id": str, # SkyflowID of the records to be deleted - "table": str, # name of table holding the above skyflow_id - }, - { - "id": str, # SkyflowID of the records to be deleted - "table": str, # name of table holding the above skyflow_id - } - ] -} +from skyflow.error import SkyflowError +from skyflow.vault.data import DeleteRequest + +primary_delete_ids = [ + 'SKYFLOW_ID1', + 'SKYFLOW_ID2', + 'SKYFLOW_ID3', +] + +delete_request = DeleteRequest( + table = '', + ids = primary_delete_ids +) ``` -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/delete_sample.py) of delete call: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/delete_records.py) of delete call: ```python +from skyflow.error import SkyflowError +from skyflow.vault.data import DeleteRequest -skyflowID = "b3d52e6d-1d6c-4750-ba28-aa30d04dbf01" -record = {"id": skyflowID, "table": "cards"} - -invalidID = "invalid skyflow ID" -badRecord = {"id": invalidID, "table": "cards"} +try: + delete_ids = [ + '77e093f8-3ace-4295-8683-bb6745d6178e', + 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' + ] -records = {"records": [record, badRecord]} + delete_request = DeleteRequest( + table='table1', + ids=delete_ids + ) -try: - client.delete(records) + response = client.vault('').delete(delete_request) + print('Response:', response) except SkyflowError as e: - if e.data: - print(e.data) # see note below - else: - print(e) + print('Error Occurred:', e) + ``` Sample response: ```python -{ - "records":[ - { - "skyflow_id":"b3d52e6d-1d6c-4750-ba28-aa30d04dbf01", - "deleted":true - } - ], - "errors":[ - { - "id":"invalid skyflow ID", - "error":{ - "code":404, - "description":"No 
Records Found - request id: 239d462c-aa13-9f9d-a349-165b3dd11217" - } - } - ] -} +DeleteResponse( + deleted_ids=[ + '77e093f8-3ace-4295-8683-bb6745d6178e', + 'bf5989cc-79e8-4b2f-ad71-cb20b0a76091' + ], + errors=[] +) + ``` ### Invoke Connection -Using Skyflow Connection, end-user applications can integrate checkout/card issuance flow with their apps/systems. To invoke connection, use the invoke_connection(config: Skyflow.ConnectionConfig) method of the Skyflow client. +Using Skyflow Connection, end-user applications can integrate checkout/card issuance flow with their apps/systems. To invoke connection, use the `invoke` method of the Skyflow client. ```python -config = ConnectionConfig( - connectionURL: str, # connection url received when creating a skyflow connection integration - methodName: Skyflow.RequestMethod, - pathParams: dict, # optional - queryParams: dict, # optional - requestHeader: dict, # optional - requestBody: dict, # optional +from skyflow.error import SkyflowError +from skyflow.vault.connection import InvokeConnectionRequest + +body = { + 'KEY1': 'VALUE1', + 'KEY2': 'VALUE2' +} +headers = { + 'KEY1': 'VALUE1' +} +path_params = { + 'KEY1': 'VALUE1' +} +query_params = { + 'KEY1': 'VALUE1' +} + +invoke_connection_request = InvokeConnectionRequest( + method = Method.POST, + body = body, + headers = headers, # optional + path_params = path_params, # optional + query_params = query_params # optional ) -client.invokeConnection(config) ``` `methodName` supports the following methods: @@ -852,77 +1041,88 @@ client.invokeConnection(config) - PATCH - DELETE -**pathParams, queryParams, requestHeader, requestBody** are the JSON objects represented as dictionaries that will be sent through the connection integration url. +**path_params, query_params, request_header, request_body** are the JSON objects represented as dictionaries that will be sent through the connection integration url. 
-An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/invoke_connection_sample.py) of invoke_connection: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/invoke_connection.py) of invoke_connection: ```python -from skyflow.vault import ConnectionConfig, Configuration, RequestMethod +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow.utils.enums import Method +from skyflow.error import SkyflowError +from skyflow.vault.connection import InvokeConnectionRequest + +credentials = { + 'path': '/path/to/credentials.json', +} -bearerToken = '' -def token_provider(): - global bearerToken - if !(is_expired(bearerToken)): - return bearerToken - bearerToken, _ = generate_bearer_token('') - return bearerToken +client = ( + Skyflow.builder() + .add_connection_config({ + 'connection_id': '', + 'connection_url': '', + 'credentials': credentials + }) + .set_log_level(LogLevel.OFF) + .build() +) -try: - config = Configuration('', '', token_provider) - connectionConfig = ConnectionConfig('', RequestMethod.POST, - requestHeader={ - 'Content-Type': 'application/json', - 'Authorization': '' - }, - requestBody= # For third party integration - { - "expirationDate": { - "mm": "12", - "yy": "22" - } +invoke_connection_request = InvokeConnectionRequest( + method=Method.POST, + body={ + 'card_number': '4337-1696-5866-0865', + 'ssn': '524-41-4248' }, - pathParams={'cardID': ''}) # param as in the example - client = Client(config) + headers = { + 'Content-Type': 'application/json' + } +) + +response = client.connection('').invoke(invoke_connection_request) + +print(response) - response = client.invoke_connection(connectionConfig) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) ``` Sample response: ```python -{ - "receivedTimestamp": "2021-11-05 13:43:12.534", - "processingTimeinMs": 12, - "resource": { - "cvv2": "558" +ConnectionResponse( + { + 'card_number': 
'4337-1696-5866-0865', + 'ssn': '524-41-4248', + 'request_id': '84796a11-0b7d-4cb0-a348-cf9fefb5886f,84796a11-0b7d-4cb0-a348-cf9fefb5886f' } -} +) + ``` ### Query -To retrieve data with SQL queries, use the `query(queryInput, options)` method. `queryInput` is an object that takes the `query` parameter as follows: +To retrieve data with SQL queries, use the `query` method. `QueryRequest` is class that takes the `query` parameter as follows: ```python -{ - query: str # SQL query statement -} +from skyflow.vault.data import QueryRequest + +query_request = QueryRequest( + query= '' +) ``` See [Query your data](https://docs.skyflow.com/query-data/) and [Execute Query](https://docs.skyflow.com/record/#QueryService_ExecuteQuery) for guidelines and restrictions on supported SQL statements, operators, and keywords. -An [example](https://github.com/skyflowapi/skyflow-python/blob/main/samples/query_sample.py) of Query call: +An [example](https://github.com/skyflowapi/skyflow-python/blob/SK-1749-readme/samples/vault_api/query_records.py) of Query call: ```python -queryInput = { - query: "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" -} +from skyflow.error import SkyflowError +from skyflow.vault.data import QueryRequest + +query_request = QueryRequest( + query = "SELECT * FROM cards WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" +) try: - client.query(queryInput) + skyflow_client.vault('').query(query_request) except SkyflowError as e: if e.data: print(e.data) @@ -933,37 +1133,58 @@ except SkyflowError as e: Sample Response ```python -{ - "records": [ - { - "fields": { - "card_number": "XXXXXXXXXXXX1111", - "card_pin": "*REDACTED*", - "cvv": "", - "expiration_date": "*REDACTED*", - "expiration_month": "*REDACTED*", - "expiration_year": "*REDACTED*", - "name": "a***te", - "skyflow_id": "3ea3861-x107-40w8-la98-106sp08ea83f", - "ssn": "XXX-XX-6789", - "zip_code": None - }, - "tokens": None - } - ] -} +QueryResponse( + fields=[ + { + 
'card_number': 'XXXXXXXXXXXX1112', + 'name': 'S***ar', + 'skyflow_id': '4f7af9f9-09e0-4f47-af8e-04c9b1ee1968', + 'tokenized_data': {} + } + ], + errors=[] +) ``` ## Logging -The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(logLevel)` as shown below: +The skyflow python SDK provides useful logging using python's inbuilt `logging` library. By default the logging level of the SDK is set to `LogLevel.ERROR`. This can be changed by using `set_log_level(log_level)` as shown below: ```python -import logging -from skyflow import set_log_level, LogLevel +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow import Env + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', + } +credentials_string = json.dumps(skyflow_credentials) + +# Pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: "API_KEY", # API_KEY + # path: "PATH", # path to credentials file + # credentials_string: credentials_string, # credentials as string +} -logging.basicConfig() # You can set the basic config here -set_log_level(LogLevel.INFO) # sets the skyflow SDK log level to INFO +client = ( + Skyflow.builder() + .add_vault_config({ + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials # individual credentials + }) + .add_skyflow_credentials(credentials) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) ``` Current the following 5 log levels are supported: 
diff --git a/ci-scripts/bump_version.sh b/ci-scripts/bump_version.sh index b0a57a9e..a770e905 100755 --- a/ci-scripts/bump_version.sh +++ b/ci-scripts/bump_version.sh @@ -1,22 +1,24 @@ Version=$1 SEMVER=$Version -if [ -z $2 ] +if [ -z "$2" ] then - echo "Bumping package version to $1" + echo "Bumping package version to $1" - sed -E "s/current_version = .+/current_version = \'$SEMVER\'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile - sed -E "s/SDK_VERSION = .+/SDK_VERSION = \'$SEMVER\'/g" skyflow/version.py > tempfile && cat tempfile > skyflow/version.py && rm -f tempfile - - echo -------------------------- - echo "Done, Package now at $1" + sed -E "s/current_version = .+/current_version = '$SEMVER'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile + sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$SEMVER'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile + echo -------------------------- + echo "Done, Package now at $1" else - echo "Bumping package version to $1-dev.$2" + # Use dev version with commit SHA + DEV_VERSION="${SEMVER}.dev0+$(echo $2 | tr -dc '0-9a-f')" + + echo "Bumping package version to $DEV_VERSION" - sed -E "s/current_version = .+/current_version = \'$SEMVER-dev.$2\'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile - sed -E "s/SDK_VERSION = .+/SDK_VERSION = \'$SEMVER-dev.$2\'/g" skyflow/version.py > tempfile && cat tempfile > skyflow/version.py && rm -f tempfile + sed -E "s/current_version = .+/current_version = '$DEV_VERSION'/g" setup.py > tempfile && cat tempfile > setup.py && rm -f tempfile + sed -E "s/SDK_VERSION = .+/SDK_VERSION = '$DEV_VERSION'/g" skyflow/utils/_version.py > tempfile && cat tempfile > skyflow/utils/_version.py && rm -f tempfile - echo -------------------------- - echo "Done, Package now at $1-dev.$2" + echo -------------------------- + echo "Done, Package now at $DEV_VERSION" fi diff --git a/requirements.txt 
b/requirements.txt new file mode 100644 index 00000000..46a85940 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,11 @@ +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.25.3, < 2.1.0 +pydantic >= 2 +typing-extensions >= 4.7.1 +DateTime~=5.5 +PyJWT~=2.9.0 +requests~=2.32.3 +coverage +cryptography +python-dotenv~=1.0.1 \ No newline at end of file diff --git a/samples/README.md b/samples/README.md deleted file mode 100644 index 64863dbf..00000000 --- a/samples/README.md +++ /dev/null @@ -1,211 +0,0 @@ -# Python SDK samples - -Test the SDK by adding `VAULT-ID`, `VAULT-URL`, and `SERVICE-ACCOUNT` details in -the required places for each sample. - -## Prerequisites -- A Skyflow account. If you don't have one, register for one on the - [Try Skyflow](https://skyflow.com/try-skyflow) page. -- Python 3.7.0 or higher. - -## Prepare - -### Install the Python SDK - -```bash -pip install skyflow -``` - -### Create the vault - -1. In a browser, sign in to Skyflow Studio. -2. Create a vault by clicking **Create Vault** > **Start With a Template** > - **Quickstart vault**. -3. Once the vault is ready, click the gear icon and select **Edit Vault Details**. -4. Note your **Vault URL** and **Vault ID** values, then click **Cancel**. - You'll need these later. - -### Create a service account - -1. In the side navigation click, **IAM** > **Service Accounts** > **New Service Account**. -2. For **Name**, enter "SDK Sample". For **Roles**, choose **Vault Editor**. -3. Click **Create**. Your browser downloads a **credentials.json** file. Keep - this file secure, as You'll need it for each of the samples. - -## The samples -### [Get data](./get_sample.py) - -To retrieve data using Skyflow IDs or unique column values, use the `get(records: dict)` method. The `records` parameter takes a Dictionary that contains either an array of Skyflow IDs or a unique column name and values. - -Note: You can use either Skyflow IDs or `unique` values to retrieve records. 
You can't use both at the same time. -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | One of the four Redaction Types. | -| `` | Skyflow Id of the record to be fetched. | -| `` | Unique column name to fetch the data. | -| `` | Column value of the corresponding column. | - -#### Run the sample - -```bash -python3 get_sample.py -``` -### [Get data by ID](./get_by_ids_sample.py) - -Get data using Skyflow IDs for the desired records. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | Skyflow ID of the first record. | -| `` | Skyflow ID of the second record. | -| `` | Skyflow ID of the third record. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to get data from. | - -#### Run the sample - -```bash -python3 get_by_ids_sample.py -``` - - -### [Update data](./update_sample.py) - -Update data in the vault. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | Skyflow Id of the record to be updated. | -| `` | Name of the column to update data. | -| `` | Valid value to update into the corresponding column. 
| - -#### Run the sample - -```bash -python3 update_sample.py -``` - -### [Insert data](./insert_sample.py) - -Insert data in the vault. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the table to insert data into. | -| `` | Name of the column to insert data into. | -| `` | Valid value to insert into the corresponding column. | - -#### Run the sample - -```bash -python3 insert_sample.py -``` - -### [Detokenize data](./detokenize_sample.py) - -Detokenize a data token from the vault. Make sure the specified token is for -data that exists in the vault. If you need a valid token, use -[insert_sample.py](insert_sample.py) to insert the data, then use this data's -token for detokenization. - -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | Name of the column to insert data into. | -| `` | Token for the data you want to detokenize. | - -#### Run the sample - -```bash -python3 detokenize_sample.py -``` - -### [Invoke a connection](./invoke_connection_sample.py) - -Skyflow Connections is a gateway service that uses Skyflow's underlying -tokenization capabilities to securely connect to first-party and third-party -services. This way, your infrastructure is never directly exposed to sensitive -data, and you offload security and compliance requirements to Skyflow. 
- -#### Configure - -Replace the following values in the sample file: - -| Value | Description | -| ------------------------------ | ------------------------------------------------------- | -| `` | ID of your vault. | -| `` | URL of your vault. | -| `` | relative path to your service account credentials file. | -| `` | URL of your connection. | -| `` | Access token for your connection. | -| `requestBody` | Your request body content. | -| `pathParams` | Your path parameters. | - -#### Run the sample - -```bash -python3 invoke_connection_sample.py -``` - -### [Service account token generation](./sa_token_sample.py) - -Generates SA Token using path of credentials file. - -#### Configure - -Replace `` with the relative path to your service account credentials file. - -#### Run the sample - -```bash -python3 sa_token_sample.py -``` - -### [Generate Bearer Token](./generate_bearer_token_from_creds_sample.py) - -Generates SA Token using json content of credentials file. - -#### Configure - -Replace `credentials` with the content of service account credentials file. - -#### Run the sample - -```bash -python3 generate_bearer_token_from_creds_sample.py -``` diff --git a/samples/delete_sample.py b/samples/delete_sample.py deleted file mode 100644 index 85137e6f..00000000 --- a/samples/delete_sample.py +++ /dev/null @@ -1,40 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration,DeleteOptions - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - options = DeleteOptions(False) - - data = {"records": [ - { - "id": "", - "table": "", - }, - { - "id": "", - "table": "", - } - ]} - - response = client.delete(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/detokenize_sample.py b/samples/detokenize_sample.py deleted file mode 100644 index 4cfae8c1..00000000 --- a/samples/detokenize_sample.py +++ /dev/null @@ -1,41 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration -from skyflow.vault import RedactionType - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = { - "records": [ - { - "token": '', - "redaction": RedactionType.MASKED - }, - { - "token": '', - } - ] - } - response = client.detokenize(data) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/detokenize_with_continue_on_error_sample.py b/samples/detokenize_with_continue_on_error_sample.py deleted file mode 100644 index 18927fc1..00000000 --- a/samples/detokenize_with_continue_on_error_sample.py +++ /dev/null @@ -1,54 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration -from skyflow.vault import RedactionType -from skyflow.vault._config import DetokenizeOptions - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - -def detokenize(client, data): - try: - response = client.detokenize(data, DetokenizeOptions(continueOnError=True)) - print('Response:', response) - except SkyflowError as e: - print('Error Occurred:', e) - -def bulkDetokenize(client, data): - try: - response = client.detokenize(data, DetokenizeOptions(continueOnError=False)) - print('Response:', response) - except SkyflowError as e: - print('Error Occurred:', e) - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = { - "records": [ - { - "token": '', - "redaction": RedactionType.MASKED - }, - { - "token": '', - } - ] - } - - detokenize(client, data) - bulkDetokenize(client, data) -except Exception as e: - print('Something went wrong:', e) diff --git a/samples/generate_bearer_token_from_creds_sample.py b/samples/generate_bearer_token_from_creds_sample.py deleted file mode 100644 index 7c36c47e..00000000 --- a/samples/generate_bearer_token_from_creds_sample.py +++ /dev/null @@ -1,45 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json - -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token_from_creds, is_expired - -''' - This sample demonstrates the usage of generate_bearer_token_from_creds - - - Use json.dumps(credentialsString) to make it a valid json string - - Use generate_bearer_token_from_creds(jsonString) to get the Bearer Token -''' - -# cache token for reuse -bearerToken = '' -tokenType = '' - - -def token_provider(): - global bearerToken - global tokenType - # As an example - credentials = { - "clientID": "", - "clientName": "", - "keyID": "", - "tokenURI": '', - "privateKey": "" - } - jsonString = json.dumps(credentials) - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token_from_creds( - credentials=jsonString) - - return bearerToken - - -try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) -except SkyflowError as e: - print(e) diff --git a/samples/get_by_ids_sample.py b/samples/get_by_ids_sample.py deleted file mode 100644 index 9eeece8d..00000000 --- a/samples/get_by_ids_sample.py +++ /dev/null @@ -1,36 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - } - ]} - - response = client.get_by_id(data) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/get_sample.py b/samples/get_sample.py deleted file mode 100644 index 3a741864..00000000 --- a/samples/get_sample.py +++ /dev/null @@ -1,45 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType, GetOptions - - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = GetOptions(False) - - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - }, - #To get records using unique column name and values. 
- { - "redaction" : "", - "table": "", - "columnName": "", - "columnValues": ["", ""], - } - ]} - - response = client.get(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/get_with_options.py b/samples/get_with_options.py deleted file mode 100644 index 8fda1136..00000000 --- a/samples/get_with_options.py +++ /dev/null @@ -1,34 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RedactionType, GetOptions - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - options = GetOptions(False) - data = {"records": [ - { - "ids": ["", "", ""], - "table": "", - "redaction": RedactionType.PLAIN_TEXT - } - ]} - - response = client.get(data,options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_sample.py b/samples/insert_sample.py deleted file mode 100644 index 14756c92..00000000 --- a/samples/insert_sample.py +++ /dev/null @@ -1,38 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = InsertOptions(True) - - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_upsert_sample.py b/samples/insert_upsert_sample.py deleted file mode 100644 index aec06c49..00000000 --- a/samples/insert_upsert_sample.py +++ /dev/null @@ -1,39 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration, UpsertOption - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - upsertOption = UpsertOption(table='',column='') - options = InsertOptions(tokens=True,upsert=[upsertOption]) - - data = { - 'records': [ - { - 'table': '', - 'fields': { - '': '' - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/insert_with_continue_on_error_sample.py b/samples/insert_with_continue_on_error_sample.py deleted file mode 100644 index 19df2b20..00000000 --- a/samples/insert_with_continue_on_error_sample.py +++ /dev/null @@ -1,44 
+0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, InsertOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = InsertOptions(tokens=True, continueOnError=True) - - data = { - "records": [ - { - "table": "", - "fields": { - "": "" - } - }, - { - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.insert(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/invoke_connection_sample.py b/samples/invoke_connection_sample.py deleted file mode 100644 index 7cf54319..00000000 --- a/samples/invoke_connection_sample.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration, RequestMethod, ConnectionConfig - -''' -This sample is for generating CVV using Skyflow Connection with a third party integration such as VISA -''' - -# cache token for reuse -bearerToken = '' - - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - connectionConfig = ConnectionConfig('', RequestMethod.POST, - requestHeader={ - 'Content-Type': 'application/json', - 'Authorization': '' - }, - requestBody= # For third party integration - { - "expirationDate": { - "mm": "12", - "yy": "22" - } - }, - pathParams={'cardID': ''}) # param as in the example - client = Client(config) - - response = client.invoke_connection(connectionConfig) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/query_sample.py b/samples/query_sample.py deleted file mode 100644 index 6b4d507c..00000000 --- a/samples/query_sample.py +++ /dev/null @@ -1,35 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' - -from skyflow import set_log_level, LogLevel -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - set_log_level(LogLevel.DEBUG) - - data = { - "query": "" - } - response = client.query(data) - print('Response:', response) -except SkyflowError as e: - if(e.data): - print('Error Occurred:', e.data) - else: - print('Error Occurred:', e.message) \ No newline at end of file diff --git a/samples/sa_token_sample.py b/samples/sa_token_sample.py deleted file mode 100644 index 9169cb4b..00000000 --- a/samples/sa_token_sample.py +++ /dev/null @@ -1,26 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired - -# cache token for reuse -bearerToken = '' -tokenType = '' - - -def token_provider(): - global bearerToken - global tokenType - if is_expired(bearerToken): - bearerToken, tokenType = generate_bearer_token( - '') - return bearerToken, tokenType - - -try: - accessToken, tokenType = token_provider() - print("Access Token:", accessToken) - print("Type of token:", tokenType) -except SkyflowError as e: - print(e) diff --git a/samples/service_account/scoped_token_generation_example.py b/samples/service_account/scoped_token_generation_example.py new file mode 100644 index 00000000..e94f6433 --- /dev/null +++ b/samples/service_account/scoped_token_generation_example.py @@ -0,0 +1,36 @@ +import json +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = '' +bearer_token = '' + +skyflow_credentials = { + 'clientID': 
'', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) +# Generate bearer token from credentials file path + +options = {'role_ids': ['ROLE_ID1', 'ROLE_ID2']} +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token( + '', options + ) + + print(bearer_token, token_type) + + +# Generate bearer token from credentials string +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds( + credentials_string, options + ) + + print(bearer_token, token_type) diff --git a/samples/service_account/signed_token_generation_example.py b/samples/service_account/signed_token_generation_example.py new file mode 100644 index 00000000..130cb6e8 --- /dev/null +++ b/samples/service_account/signed_token_generation_example.py @@ -0,0 +1,38 @@ +import json +from skyflow.service_account import ( + is_expired, + generate_signed_data_tokens, + generate_signed_data_tokens_from_creds, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + + +options = { + 'ctx': 'CONTEX_ID', + 'data_tokens': ['DATA_TOKEN1', 'DATA_TOKEN2'], + 'time_to_live': 90, # in seconds +} + +# Generate bearer token from credentials file path +if is_expired(bearer_token): + actual_token, signed_token = generate_signed_data_tokens( + '', options + ) + + +# Generate bearer token from credentials string +if is_expired(bearer_token): + actual_token, signed_token = generate_signed_data_tokens_from_creds( + credentials_string, options + ) diff --git a/samples/service_account/token_generation_example.py b/samples/service_account/token_generation_example.py new file mode 100644 index 00000000..1c44c0a5 --- /dev/null +++ b/samples/service_account/token_generation_example.py @@ -0,0 +1,32 @@ +import json +from 
skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + + +# Generate bearer token from credentials file path +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token('') + + print(bearer_token, token_type) + + +# Generate bearer token from credentials string +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds(credentials_string) + + print(bearer_token, token_type) diff --git a/samples/service_account/token_generation_with_context_example.py b/samples/service_account/token_generation_with_context_example.py new file mode 100644 index 00000000..b2deb714 --- /dev/null +++ b/samples/service_account/token_generation_with_context_example.py @@ -0,0 +1,37 @@ +import json +from skyflow.service_account import ( + generate_bearer_token, + generate_bearer_token_from_creds, + is_expired, +) + +file_path = 'CREDENTIALS_FILE_PATH' +bearer_token = '' + +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# Generate bearer token from credentials file path +options = {'ctx': ''} + +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token( + '', options + ) + + print(bearer_token, token_type) + + +# Generate bearer token from credentials string +if is_expired(bearer_token): + bearer_token, token_type = generate_bearer_token_from_creds( + credentials_string, options + ) + + print(bearer_token, token_type) diff --git a/samples/update_sample.py b/samples/update_sample.py deleted file mode 100644 index 9c6ea907..00000000 --- a/samples/update_sample.py +++ /dev/null @@ -1,39 +0,0 @@ -''' - 
Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors import SkyflowError -from skyflow.service_account import generate_bearer_token, is_expired -from skyflow.vault import Client, UpdateOptions, Configuration - -# cache token for reuse -bearerToken = '' - -def token_provider(): - global bearerToken - if is_expired(bearerToken): - bearerToken, _ = generate_bearer_token('') - return bearerToken - - -try: - config = Configuration( - '', '', token_provider) - client = Client(config) - - options = UpdateOptions(True) - - data = { - "records": [ - { - "id": "", - "table": "", - "fields": { - "": "" - } - } - ] - } - response = client.update(data, options=options) - print('Response:', response) -except SkyflowError as e: - print('Error Occurred:', e) diff --git a/samples/vault_api/client_operations.py b/samples/vault_api/client_operations.py new file mode 100644 index 00000000..ad98d309 --- /dev/null +++ b/samples/vault_api/client_operations.py @@ -0,0 +1,70 @@ +import json +from skyflow import Skyflow, LogLevel +from skyflow import Env +from skyflow.vault.data import DeleteRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': '', + #'credentials_string': credentials_string +} + + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': '', # primary vault + 'cluster_id': '', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .set_log_level(LogLevel.ERROR) # set log level by default it is set to ERROR + .build() +) + + +# add vault config on the fly + +skyflow_client.add_vault_config( + { + 'vault_id': 'VAULT_ID2', # secondary vault + 'cluster_id': 'CLUSTER_ID2', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + # if you don't specify individual credentials, skyflow credentials will be used + } +) + + +skyflow_client.update_vault_config( + { + 'vault_id': 'VAULT_ID2', + 'cluster_id': 'CLUSTER_ID2', + 'credentials': credentials, # update credentials + } +) + + +# perform operations + +delete_request = DeleteRequest( + table = '', + ids = ['', ''] +) + +# perform delete call if you don't specify vault() it will return the first valid vault +response = skyflow_client.vault('VAULT_ID2').delete(delete_request) + +# remove vault on the fly +skyflow_client.remove_vault_config('VAULT_ID') diff --git a/samples/vault_api/credentials_options.py b/samples/vault_api/credentials_options.py new file mode 100644 index 00000000..09e02061 --- /dev/null +++ b/samples/vault_api/credentials_options.py @@ -0,0 +1,69 @@ +import json +from skyflow import Skyflow, LogLevel +from skyflow import Env +from skyflow.vault.data import DeleteRequest + +skyflow_credentials = { + 'clientID': '', + 'clientName': 
'', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': '', # primary vault + 'cluster_id': '', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + } + ) + .add_vault_config( + { + 'vault_id': '', + 'cluster_id': '', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.ERROR) # set log level by default it is set to ERROR + .build() +) + +primary_delete_ids = [ + 'SKYFLOW_ID1', + 'SKYFLOW_ID2', + 'SKYFLOW_ID3', +] + +# perform operations + +primary_delete_request = DeleteRequest(table='', ids=primary_delete_ids) + +# VAULT_ID1 will use credentials if you don't specify individual credentials at config level +response = skyflow_client.vault('VAULT_ID2').delete(primary_delete_request) + + +secondary_delete_ids = [ + 'SKYFLOW_ID1', + 'SKYFLOW_ID2', + 'SKYFLOW_ID3', +] + +secondary_delete_request = DeleteRequest(table='TABLE_NAME', ids=secondary_delete_ids) + +# VAULT_ID2 will use individual credentials at config level +response = skyflow_client.vault('VAULT_ID2').delete(primary_delete_request) diff --git a/samples/vault_api/delete_records.py b/samples/vault_api/delete_records.py new file mode 100644 index 00000000..d0960629 --- /dev/null +++ b/samples/vault_api/delete_records.py @@ -0,0 +1,53 @@ +import json +from skyflow import Skyflow +from skyflow import LogLevel +from skyflow import Env +from 
skyflow.vault.data import DeleteRequest + + +# To generate Bearer Token from credentials string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +primary_delete_ids = [ + 'SKYFLOW_ID1', + 'SKYFLOW_ID2', + 'SKYFLOW_ID3', +] + +delete_request = DeleteRequest(table='', ids=primary_delete_ids) + +response = client.vault('').delete(delete_request) + +print(response) diff --git a/samples/vault_api/detokenize_records.py b/samples/vault_api/detokenize_records.py new file mode 100644 index 00000000..192c7e75 --- /dev/null +++ b/samples/vault_api/detokenize_records.py @@ -0,0 +1,52 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import RedactionType +from skyflow.vault.tokens import DetokenizeRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', #API_KEY + # path: 'PATH', #path to credentials file + # credentials_string: credentials_string, #credentials as string +} + +client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + + +detokenize_data = ['TOKEN1', 'TOKEN2', 'TOKEN3'] + +detokenize_request = DetokenizeRequest( + tokens=detokenize_data, + redaction_type = RedactionType.PLAIN_TEXT +) + +response = client.vault('VAULT_ID').detokenize(detokenize_request) + +print(response) diff --git a/samples/vault_api/get_column_values.py b/samples/vault_api/get_column_values.py new file mode 100644 index 00000000..29272614 --- /dev/null +++ b/samples/vault_api/get_column_values.py @@ -0,0 +1,55 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import GetRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +column_values = [ + 'VALUE1', + 'VALUE2', +] + +get_ids = ['SKYFLOW_ID1', 'SKYFLOW_ID2'] + + +get_request = GetRequest( + table='TABLE_NAME', column_name='COLUMN_NAME', column_values=column_values +) + +response = client.vault('VAULT_ID').get(get_request) + +print(response) diff --git a/samples/vault_api/get_records.py b/samples/vault_api/get_records.py new file mode 100644 index 00000000..718bdd1a --- /dev/null +++ b/samples/vault_api/get_records.py @@ -0,0 +1,49 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import GetRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + + +get_ids = ['SKYFLOW_ID1', 'SKYFLOW_ID2'] + + +get_request = GetRequest(table='TABLE_NAME', ids=get_ids, return_tokens=True) + +response = client.vault('VAULT_ID').get(get_request) + +print(response) diff --git a/samples/vault_api/insert_byot.py b/samples/vault_api/insert_byot.py new file mode 100644 index 00000000..f2ec4773 --- /dev/null +++ b/samples/vault_api/insert_byot.py @@ -0,0 +1,60 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.error import SkyflowError +from skyflow.utils.enums import TokenStrict +from skyflow.vault.data import InsertRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) + +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +# Initialize Client + +try: + insert_data = [{'': ''}, {'': ''}] + + token_data = [{'': ''}, {'': ''}] + + insert_request = InsertRequest( + table_name='', + values=insert_data, + token_strict=TokenStrict.ENABLE, # token strict is enabled, + tokens=token_data, + ) + + response = skyflow_client.vault('VAULT_ID').insert(insert_request) + print('Response:', response) +except SkyflowError as e: + print('Error Occurred:', e) diff --git a/samples/vault_api/insert_records.py b/samples/vault_api/insert_records.py new file mode 100644 index 00000000..5e87f1d6 --- /dev/null +++ b/samples/vault_api/insert_records.py @@ -0,0 +1,54 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import InsertRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) +# please pass one of api_key, token, credentials_string & path as credentials +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +# sample data +insert_data = [ + {'': '', '': ''}, +] + +insert_request = InsertRequest( + table_name='TABLE_NAME', + values=insert_data, + continue_on_error=False, # if continue on error is set true we will return request_index for errors + return_tokens=True, +) + +response = skyflow_client.vault('VAULT_ID').insert(insert_request) + +print(response) diff --git a/samples/vault_api/invoke_connection.py b/samples/vault_api/invoke_connection.py new file mode 100644 index 00000000..f1d7f503 --- /dev/null +++ b/samples/vault_api/invoke_connection.py @@ -0,0 +1,65 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.utils.enums import Method +from skyflow.vault.connection import InvokeConnectionRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) +# please pass one of api_key, token, credentials_string & path as credentials + +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_connection_config( + { + 'connection_id': 'CONNECTION_ID', + 'connection_url': 'CONNECTION_URL', + 'credentials': credentials, + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + + +body = {'KEY1': 'VALUE1', 'KEY2': 'VALUE2'} +headers = {'KEY1': 'VALUE1'} +path_params = {'KEY1': 'VALUE1'} +query_params = {'KEY1': 'VALUE1'} + +invoke_connection_request = InvokeConnectionRequest( + method=Method.POST, + body=body, + headers=headers, # optional + path_params=path_params, # optional + query_params=query_params, # optional +) +# will return the first connection +response = skyflow_client.connection().invoke(invoke_connection_request) + +print(response) diff --git a/samples/vault_api/query_records.py b/samples/vault_api/query_records.py new file mode 100644 index 00000000..fa6ff6b7 --- /dev/null +++ b/samples/vault_api/query_records.py @@ -0,0 +1,56 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import QueryRequest + +# To generate Bearer Token from credentials 
string. +skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) +# please pass one of api_key, token, credentials_string & path as credentials + +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_connection_config( + { + 'connection_id': 'CONNECTION_ID', + 'connection_url': 'CONNECTION_URL', + 'credentials': credentials, + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +# sample query +query = '' + + +query_request = QueryRequest(query=query) + +response = skyflow_client.vault('VAULT_ID').query(query_request) + +print(response) diff --git a/samples/vault_api/tokenize_records.py b/samples/vault_api/tokenize_records.py new file mode 100644 index 00000000..3cf3f65c --- /dev/null +++ b/samples/vault_api/tokenize_records.py @@ -0,0 +1,56 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.tokens import TokenizeRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + "clientID": "", + "clientName": "", + "tokenURI": "", + "keyID": "", + "privateKey": "", +} +credentials_string = json.dumps(skyflow_credentials) +# please pass one of api_key, token, credentials_string & path as credentials + +credentials = { + "token": "BEARER_TOKEN", # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + "vault_id": "VAULT_ID", # primary vault + "cluster_id": "CLUSTER_ID", # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + "env": Env.PROD, # Env by default it is set to PROD + "credentials": credentials, # individual credentials + } + ) + .add_connection_config( + { + "connection_id": "CONNECTION_ID", + "connection_url": "CONNECTION_URL", + "credentials": credentials, + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +# tokenize only supports value and column_group +# sample data +tokenize_values = [{"": "", "": ""}] + +tokenize_request = TokenizeRequest(values=tokenize_values) + +response = skyflow_client.vault("VAULT_ID").tokenize(tokenize_request) + +print(response) diff --git a/samples/vault_api/update_record.py b/samples/vault_api/update_record.py new file mode 100644 index 00000000..ad61718d --- /dev/null +++ b/samples/vault_api/update_record.py @@ -0,0 +1,55 @@ +import json +from skyflow import Env +from skyflow import Skyflow, LogLevel +from skyflow.vault.data import UpdateRequest + +# To generate Bearer Token from credentials string. 
+skyflow_credentials = { + 'clientID': '', + 'clientName': '', + 'tokenURI': '', + 'keyID': '', + 'privateKey': '', +} +credentials_string = json.dumps(skyflow_credentials) +# please pass one of api_key, token, credentials_string & path as credentials + +credentials = { + 'token': 'BEARER_TOKEN', # bearer token + # api_key: 'API_KEY', # API_KEY + # path: 'PATH', # path to credentials file + # credentials_string: credentials_string, # credentials as string +} + +skyflow_client = ( + Skyflow.builder() + .add_vault_config( + { + 'vault_id': 'VAULT_ID', # primary vault + 'cluster_id': 'CLUSTER_ID', # ID from your vault URL Eg https://{clusterId}.vault.skyflowapis.com + 'env': Env.PROD, # Env by default it is set to PROD + 'credentials': credentials, # individual credentials + } + ) + .add_connection_config( + { + 'connection_id': 'CONNECTION_ID', + 'connection_url': 'CONNECTION_URL', + 'credentials': credentials, + } + ) + .add_skyflow_credentials( + credentials + ) # skyflow credentials will be used if no individual credentials are passed + .set_log_level(LogLevel.INFO) # set log level by default it is set to ERROR + .build() +) + +# sample data +update_data = {'skyflow_id': '', '': ''} + +update_request = UpdateRequest(table='TABLE_NAME', data=update_data) + +response = skyflow_client.vault('VAULT_ID').update(update_request) + +print(response) diff --git a/setup.py b/setup.py index c7756728..7f0e9e13 100644 --- a/setup.py +++ b/setup.py @@ -19,12 +19,14 @@ description='Skyflow SDK for the Python programming language', long_description=open('README.rst').read(), install_requires=[ - 'PyJWT', - 'datetime', - 'requests', - 'aiohttp', - 'asyncio', - 'cryptography>=3.3.1' + 'python_dateutil >= 2.5.3', + 'setuptools >= 21.0.0', + 'urllib3 >= 1.25.3, < 2.1.0', + 'pydantic >= 2', + 'typing-extensions >= 4.7.1', + 'DateTime~=5.5', + 'PyJWT~=2.9.0', + 'requests~=2.32.3' ], python_requires=">=3.7" ) diff --git a/skyflow/__init__.py b/skyflow/__init__.py index abeed968..fc02764f 
100644 --- a/skyflow/__init__.py +++ b/skyflow/__init__.py @@ -1,4 +1,2 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._utils import set_log_level, LogLevel \ No newline at end of file +from .utils import LogLevel, Env +from .client import Skyflow diff --git a/skyflow/_utils.py b/skyflow/_utils.py deleted file mode 100644 index 83bf54a6..00000000 --- a/skyflow/_utils.py +++ /dev/null @@ -1,166 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import urllib.parse -import logging -from enum import Enum -import platform -import sys -from skyflow.version import SDK_VERSION - -skyflowLog = logging.getLogger('skyflow') -skyflowLog.setLevel(logging.ERROR) - -supported_content_types = { - "JSON": 'application/json', - "PLAINTEXT": 'text/plain', - "XML": 'text/xml', - "URLENCODED": 'application/x-www-form-urlencoded', - "FORMDATA": 'multipart/form-data', -} - - -class LogLevel(Enum): - DEBUG = logging.DEBUG - INFO = logging.INFO - WARN = logging.WARN - ERROR = logging.ERROR - OFF = logging.CRITICAL - - -def set_log_level(logLevel: LogLevel): - ''' - Sets the Log Level for the Skyflow python SDK - ''' - skyflowLog.setLevel(logLevel.value) - - -def log_info(message: str, interface: str): - formattedMessage = '{} {}'.format(interface, message) - skyflowLog.info(formattedMessage) - - -# def log_debug(message: str, interface: str): -# formattedMessage = '{} {}'.format(interface, message) -# skyflowLog.debug(formattedMessage) - - -def log_error(message: str, interface: str): - formattedMessage = '{} {}'.format(interface, message) - skyflowLog.error(formattedMessage) - - -class InfoMessages(Enum): - INITIALIZE_CLIENT = "Initializing skyflow client" - CLIENT_INITIALIZED = "Initialized skyflow client successfully" - VALIDATE_INSERT_RECORDS = "Validating insert records" - VALIDATE_DETOKENIZE_INPUT = "Validating detokenize input" - VALIDATE_GET_BY_ID_INPUT = "Validating getByID input" - VALIDATE_CONNECTION_CONFIG = "Validating connection config" - INSERT_DATA_SUCCESS = 
"Data has been inserted successfully." - DETOKENIZE_SUCCESS = "Data has been detokenized successfully." - GET_BY_ID_SUCCESS = "Data fetched from ID successfully." - QUERY_SUCCESS = "Query executed successfully." - BEARER_TOKEN_RECEIVED = "tokenProvider returned token successfully." - INSERT_TRIGGERED = "Insert method triggered." - DETOKENIZE_TRIGGERED = "Detokenize method triggered." - GET_BY_ID_TRIGGERED = "Get by ID triggered." - INVOKE_CONNECTION_TRIGGERED = "Invoke connection triggered." - QUERY_TRIGGERED = "Query method triggered." - GENERATE_BEARER_TOKEN_TRIGGERED = "Generate bearer token triggered" - GENERATE_BEARER_TOKEN_SUCCESS = "Generate bearer token returned successfully" - IS_TOKEN_VALID_TRIGGERED = "isTokenValid() triggered" - IS_EXPIRED_TRIGGERED = "is_expired() triggered" - EMPTY_ACCESS_TOKEN = "Give access token is empty" - INVALID_TOKEN = "Given token is invalid" - UPDATE_TRIGGERED = "Update method triggered" - UPDATE_DATA_SUCCESS = "Data has been updated successfully" - GET_TRIGGERED = "Get triggered." - GET_SUCCESS = "Data fetched successfully." - DELETE_TRIGGERED = "Delete triggered." - DELETE_DATA_SUCCESS = "Data has been deleted successfully." 
- - -class InterfaceName(Enum): - CLIENT = "client" - INSERT = "client.insert" - DETOKENIZE = "client.detokenize" - GET_BY_ID = "client.get_by_id" - GET = "client.get" - UPDATE = "client.update" - INVOKE_CONNECTION = "client.invoke_connection" - QUERY = "client.query" - GENERATE_BEARER_TOKEN = "service_account.generate_bearer_token" - - IS_TOKEN_VALID = "service_account.isTokenValid" - IS_EXPIRED = "service_account.is_expired" - DELETE = "client.delete" - - -def http_build_query(data): - ''' - Creates a form urlencoded string from python dictionary - urllib.urlencode() doesn't encode it in a php-esque way, this function helps in that - ''' - - return urllib.parse.urlencode(r_urlencode(list(), dict(), data)) - - -def r_urlencode(parents, pairs, data): - ''' - convert the python dict recursively into a php style associative dictionary - ''' - if isinstance(data, list) or isinstance(data, tuple): - for i in range(len(data)): - parents.append(i) - r_urlencode(parents, pairs, data[i]) - parents.pop() - elif isinstance(data, dict): - for key, value in data.items(): - parents.append(key) - r_urlencode(parents, pairs, value) - parents.pop() - else: - pairs[render_key(parents)] = str(data) - - return pairs - - -def render_key(parents): - ''' - renders the nested dictionary key as an associative array (php style dict) - ''' - depth, outStr = 0, '' - for x in parents: - s = "[%s]" if depth > 0 or isinstance(x, int) else "%s" - outStr += s % str(x) - depth += 1 - return outStr - -def getMetrics(): - ''' fetch metrics - ''' - sdk_name_version = "skyflow-python@" + SDK_VERSION - - try: - sdk_client_device_model = platform.node() - except Exception: - sdk_client_device_model = "" - - try: - sdk_client_os_details = sys.platform - except Exception: - sdk_client_os_details = "" - - try: - sdk_runtime_details = sys.version - except Exception: - sdk_runtime_details = "" - - details_dic = { - 'sdk_name_version': sdk_name_version, - 'sdk_client_device_model': sdk_client_device_model, - 
'sdk_client_os_details': sdk_client_os_details, - 'sdk_runtime_details': "Python " + sdk_runtime_details, - } - return details_dic \ No newline at end of file diff --git a/skyflow/client/__init__.py b/skyflow/client/__init__.py new file mode 100644 index 00000000..246ca2f6 --- /dev/null +++ b/skyflow/client/__init__.py @@ -0,0 +1 @@ +from .skyflow import Skyflow diff --git a/skyflow/client/skyflow.py b/skyflow/client/skyflow.py new file mode 100644 index 00000000..6cba3020 --- /dev/null +++ b/skyflow/client/skyflow.py @@ -0,0 +1,236 @@ +from collections import OrderedDict +from skyflow import LogLevel +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.logger import log_info, Logger, log_error +from skyflow.utils.validations import validate_vault_config, validate_connection_config, validate_update_vault_config, \ + validate_update_connection_config, validate_credentials, validate_log_level +from skyflow.vault.client.client import VaultClient +from skyflow.vault.controller import Vault +from skyflow.vault.controller import Connection + +class Skyflow: + def __init__(self, builder): + self.__builder = builder + log_info(SkyflowMessages.Info.CLIENT_INITIALIZED.value, self.__builder.get_logger()) + + @staticmethod + def builder(): + return Skyflow.Builder() + + def add_vault_config(self, config): + self.__builder._Builder__add_vault_config(config) + return self + + def remove_vault_config(self, vault_id): + self.__builder.remove_vault_config(vault_id) + + def update_vault_config(self,config): + self.__builder.update_vault_config(config) + + def get_vault_config(self, vault_id): + return self.__builder.get_vault_config(vault_id).get("vault_client").get_config() + + def add_connection_config(self, config): + self.__builder._Builder__add_connection_config(config) + return self + + def remove_connection_config(self, connection_id): + self.__builder.remove_connection_config(connection_id) + return self + + def 
update_connection_config(self, config): + self.__builder.update_connection_config(config) + return self + + def get_connection_config(self, connection_id): + return self.__builder.get_connection_config(connection_id).get("vault_client").get_config() + + def add_skyflow_credentials(self, credentials): + self.__builder._Builder__add_skyflow_credentials(credentials) + return self + + def update_skyflow_credentials(self, credentials): + self.__builder._Builder__add_skyflow_credentials(credentials) + + def set_log_level(self, log_level): + self.__builder._Builder__set_log_level(log_level) + return self + + def update_log_level(self, log_level): + self.__builder._Builder__set_log_level(log_level) + + def vault(self, vault_id = None) -> Vault: + vault_config = self.__builder.get_vault_config(vault_id) + return vault_config.get("controller") + + def connection(self, connection_id = None) -> Connection: + connection_config = self.__builder.get_connection_config(connection_id) + return connection_config.get("controller") + + class Builder: + def __init__(self): + self.__vault_configs = OrderedDict() + self.__vault_list = list() + self.__connection_configs = OrderedDict() + self.__connection_list = list() + self.__skyflow_credentials = None + self.__log_level = LogLevel.ERROR + self.__logger = Logger(LogLevel.ERROR) + + def add_vault_config(self, config): + vault_id = config.get("vault_id") + if not isinstance(vault_id, str) or not vault_id: + raise SkyflowError( + SkyflowMessages.Error.INVALID_VAULT_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + if vault_id in [vault.get("vault_id") for vault in self.__vault_list]: + log_info(SkyflowMessages.Info.VAULT_CONFIG_EXISTS.value.format(vault_id), self.__logger) + raise SkyflowError( + SkyflowMessages.Error.VAULT_ID_ALREADY_EXISTS.value.format(vault_id), + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + + self.__vault_list.append(config) + return self + + def remove_vault_config(self, vault_id): + if vault_id 
in self.__vault_configs.keys(): + self.__vault_configs.pop(vault_id) + else: + log_error(SkyflowMessages.Error.INVALID_VAULT_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value, + logger = self.__logger) + + def update_vault_config(self, config): + validate_update_vault_config(self.__logger, config) + vault_id = config.get("vault_id") + vault_config = self.__vault_configs[vault_id] + vault_config.get("vault_client").update_config(config) + + def get_vault_config(self, vault_id): + if vault_id is None: + if self.__vault_configs: + return next(iter(self.__vault_configs.values())) + raise SkyflowError(SkyflowMessages.Error.EMPTY_VAULT_CONFIGS.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + if vault_id in self.__vault_configs: + return self.__vault_configs.get(vault_id) + log_info(SkyflowMessages.Info.VAULT_CONFIG_DOES_NOT_EXIST.value.format(vault_id), self.__logger) + raise SkyflowError(SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format(vault_id), SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + + def add_connection_config(self, config): + connection_id = config.get("connection_id") + if not isinstance(connection_id, str) or not connection_id: + raise SkyflowError( + SkyflowMessages.Error.INVALID_CONNECTION_ID.value, + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + if connection_id in [connection.get("connection_id") for connection in self.__connection_list]: + log_info(SkyflowMessages.Info.CONNECTION_CONFIG_EXISTS.value.format(connection_id), self.__logger) + raise SkyflowError( + SkyflowMessages.Error.CONNECTION_ID_ALREADY_EXISTS.value.format(connection_id), + SkyflowMessages.ErrorCodes.INVALID_INPUT.value + ) + self.__connection_list.append(config) + return self + + def remove_connection_config(self, connection_id): + if connection_id in self.__connection_configs.keys(): + self.__connection_configs.pop(connection_id) + else: + log_error(SkyflowMessages.Error.INVALID_CONNECTION_ID.value, + 
SkyflowMessages.ErrorCodes.INVALID_INPUT.value, + logger = self.__logger) + + def update_connection_config(self, config): + validate_update_connection_config(self.__logger, config) + connection_id = config['connection_id'] + connection_config = self.__connection_configs[connection_id] + connection_config.get("vault_client").update_config(config) + + def get_connection_config(self, connection_id): + if connection_id is None: + if self.__connection_configs: + return next(iter(self.__connection_configs.values())) + + raise SkyflowError(SkyflowMessages.Error.EMPTY_CONNECTION_CONFIGS.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + if connection_id in self.__connection_configs: + return self.__connection_configs.get(connection_id) + log_info(SkyflowMessages.Info.CONNECTION_CONFIG_DOES_NOT_EXIST.value.format(connection_id), self.__logger) + raise SkyflowError(SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format(connection_id), SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + + def add_skyflow_credentials(self, credentials): + self.__skyflow_credentials = credentials + return self + + def set_log_level(self, log_level): + self.__log_level = log_level + return self + + def get_logger(self): + return self.__logger + + def __add_vault_config(self, config): + validate_vault_config(self.__logger, config) + vault_id = config.get("vault_id") + vault_client = VaultClient(config) + self.__vault_configs[vault_id] = { + "vault_client": vault_client, + "controller": Vault(vault_client) + } + log_info(SkyflowMessages.Info.VAULT_CONTROLLER_INITIALIZED.value.format(config.get("vault_id")), self.__logger) + + def __add_connection_config(self, config): + validate_connection_config(self.__logger, config) + connection_id = config.get("connection_id") + vault_client = VaultClient(config) + self.__connection_configs[connection_id] = { + "vault_client": vault_client, + "controller": Connection(vault_client) + } + 
log_info(SkyflowMessages.Info.CONNECTION_CONTROLLER_INITIALIZED.value.format(config.get("connection_id")), self.__logger) + + def __update_vault_client_logger(self, log_level, logger): + for vault_id, vault_config in self.__vault_configs.items(): + vault_config.get("vault_client").set_logger(log_level,logger) + + for connection_id, connection_config in self.__connection_configs.items(): + connection_config.get("vault_client").set_logger(log_level,logger) + + def __set_log_level(self, log_level): + validate_log_level(self.__logger, log_level) + self.__log_level = log_level + self.__logger.set_log_level(log_level) + self.__update_vault_client_logger(log_level, self.__logger) + log_info(SkyflowMessages.Info.LOGGER_SETUP_DONE.value, self.__logger) + log_info(SkyflowMessages.Info.CURRENT_LOG_LEVEL.value.format(self.__log_level), self.__logger) + + def __add_skyflow_credentials(self, credentials): + if credentials is not None: + self.__skyflow_credentials = credentials + validate_credentials(self.__logger, credentials) + for vault_id, vault_config in self.__vault_configs.items(): + vault_config.get("vault_client").set_common_skyflow_credentials(credentials) + + for connection_id, connection_config in self.__connection_configs.items(): + connection_config.get("vault_client").set_common_skyflow_credentials(self.__skyflow_credentials) + def build(self): + validate_log_level(self.__logger, self.__log_level) + self.__logger.set_log_level(self.__log_level) + + for config in self.__vault_list: + self.__add_vault_config(config) + + for config in self.__connection_list: + self.__add_connection_config(config) + + self.__update_vault_client_logger(self.__log_level, self.__logger) + + self.__add_skyflow_credentials(self.__skyflow_credentials) + + return Skyflow(self) diff --git a/skyflow/error/__init__.py b/skyflow/error/__init__.py new file mode 100644 index 00000000..305c7966 --- /dev/null +++ b/skyflow/error/__init__.py @@ -0,0 +1 @@ +from ._skyflow_error import SkyflowError \ No 
newline at end of file diff --git a/skyflow/error/_skyflow_error.py b/skyflow/error/_skyflow_error.py new file mode 100644 index 00000000..b379fd03 --- /dev/null +++ b/skyflow/error/_skyflow_error.py @@ -0,0 +1,13 @@ +from skyflow.utils.logger import log_error + +class SkyflowError(Exception): + def __init__(self, + message, + http_code, + request_id = None, + grpc_code = None, + http_status = None, + details = None): + self.message = message + log_error(message, http_code, request_id, grpc_code, http_status, details) + super().__init__() \ No newline at end of file diff --git a/skyflow/errors/__init__.py b/skyflow/errors/__init__.py deleted file mode 100644 index 70f2f68f..00000000 --- a/skyflow/errors/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._skyflow_errors import SkyflowErrorCodes -from ._skyflow_errors import SkyflowError \ No newline at end of file diff --git a/skyflow/errors/_skyflow_errors.py b/skyflow/errors/_skyflow_errors.py deleted file mode 100644 index 2e792812..00000000 --- a/skyflow/errors/_skyflow_errors.py +++ /dev/null @@ -1,120 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from enum import Enum -from skyflow._utils import log_error - - -class SkyflowErrorCodes(Enum): - INVALID_INPUT = 400 - INVALID_INDEX = 404 - SERVER_ERROR = 500 - PARTIAL_SUCCESS = 500 - TOKENS_GET_COLUMN_NOT_SUPPORTED = 400 - REDACTION_WITH_TOKENS_NOT_SUPPORTED = 400 - - -class SkyflowErrorMessages(Enum): - API_ERROR = "Server returned status code %s" - - FILE_NOT_FOUND = "File at %s not found" - FILE_INVALID_JSON = "File at %s is not in JSON format" - INVALID_CREDENTIALS = "Given credentials are not valid" - INVALID_URL = "Given url '%s' is invalid" - - MISSING_PRIVATE_KEY = "Unable to read Private key" - MISSING_CLIENT_ID = "Unable to read Client ID" - MISSING_KEY_ID = "Unable to read Key ID" - MISSING_TOKEN_URI = "Unable to read Token URI" - - JWT_INVALID_FORMAT = "Private key is not in correct format" - MISSING_ACCESS_TOKEN = "accessToken not present in response" - MISSING_TOKEN_TYPE = "tokenType not present in response" - JWT_DECODE_ERROR = "Invalid access token" - - # vault - RECORDS_KEY_ERROR = "Records key is missing from payload" - FIELDS_KEY_ERROR = "Fields key is missing from payload" - TABLE_KEY_ERROR = "Table key is missing from payload" - TOKEN_KEY_ERROR = "Token key is missing from payload" - IDS_KEY_ERROR = "Id(s) key is missing from payload" - REDACTION_KEY_ERROR = "Redaction key is missing from payload" - UNIQUE_COLUMN_OR_IDS_KEY_ERROR = "Ids or Unique column key is missing from payload" - UPDATE_FIELD_KEY_ERROR = "Atleast one field should be provided to update" - - INVALID_JSON = "Given %s is invalid JSON" - INVALID_RECORDS_TYPE = "Records key has value of type %s, expected list" - INVALID_FIELDS_TYPE = "Fields key has value of type %s, expected dict" - INVALID_TOKENS_TYPE = "Tokens key has value of type %s, expected dict" - EMPTY_TOKENS_IN_INSERT = "Tokens is empty in records" - MISMATCH_OF_FIELDS_AND_TOKENS = "Fields and Tokens object are not matching" - INVALID_TABLE_TYPE = "Table key has value of type %s, expected string" - 
INVALID_TABLE_TYPE_DELETE = "Table of type string is required at index %s in records array" - INVALID_IDS_TYPE = "Ids key has value of type %s, expected list" - INVALID_ID_TYPE = "Id key has value of type %s, expected string" - INVALID_ID_TYPE_DELETE = "Id of type string is required at index %s in records array" - INVALID_REDACTION_TYPE = "Redaction key has value of type %s, expected Skyflow.Redaction" - INVALID_COLUMN_NAME = "Column name has value of type %s, expected string" - INVALID_COLUMN_VALUE = "Column values has value of type %s, expected list" - EMPTY_RECORDS_IN_DELETE = "records array cannot be empty" - EMPTY_ID_IN_DELETE = "Id cannot be empty in records array" - EMPTY_TABLE_IN_DELETE = "Table cannot be empty in records array" - RECORDS_KEY_NOT_FOUND_DELETE = "records object is required" - - INVALID_REQUEST_BODY = "Given request body is not valid" - INVALID_RESPONSE_BODY = "Given response body is not valid" - INVALID_HEADERS = "Given Request Headers is not valid" - INVALID_PATH_PARAMS = "Given path params are not valid" - INVALID_QUERY_PARAMS = "Given query params are not valid" - INVALID_PATH_PARAM_TYPE = "Path params (key, value) must be of type 'str' given type - (%s, %s)" - INVALID_QUERY_PARAM_TYPE = "Query params (key, value) must be of type 'str' given type - (%s, %s)" - - INVALID_TOKEN_TYPE = "Token key has value of type %s, expected string" - REDACTION_WITH_TOKENS_NOT_SUPPORTED = "Redaction cannot be used when tokens are true in options" - TOKENS_GET_COLUMN_NOT_SUPPORTED = "Column_name or column_values cannot be used with tokens in options" - BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED = "Both skyflow ids and column details (name and/or values) are specified in payload" - - PARTIAL_SUCCESS = "Server returned errors, check SkyflowError.data for more" - - VAULT_ID_INVALID_TYPE = "Expected Vault ID to be str, got %s" - VAULT_URL_INVALID_TYPE = "Expected Vault URL to be str, got %s" - TOKEN_PROVIDER_ERROR = "Expected Token Provider to be function, got %s" - 
- EMPTY_VAULT_ID = "Vault ID must not be empty" - EMPTY_VAULT_URL = "Vault URL must not be empty" - RESPONSE_NOT_JSON = "Response %s is not valid JSON" - - TOKEN_PROVIDER_INVALID_TOKEN = "Invalid token from tokenProvider" - INVALID_UPSERT_OPTIONS_TYPE = "upsertOptions key has value of type %s, expected list" - EMPTY_UPSERT_OPTIONS_LIST = "upsert option cannot be an empty array, atleast one object of table and column is required" - INVALID_UPSERT_TABLE_TYPE = "upsert object table key has value of type %s, expected string" - INVALID_UPSERT_COLUMN_TYPE = "upsert object column key has value of type %s, expected string" - EMPTY_UPSERT_OPTION_TABLE = "upsert object table value is empty string at index %s, expected non-empty string" - EMPTY_UPSERT_OPTION_COLUMN = "upsert object column value is empty string at index %s, expected non-empty string" - QUERY_KEY_ERROR = "Query key is missing from payload" - INVALID_QUERY_TYPE = "Query key has value of type %s, expected string" - EMPTY_QUERY = "Query key cannot be empty" - INVALID_QUERY_COMMAND = "only SELECT commands are supported, %s command was passed instead" - SERVER_ERROR = "Server returned errors, check SkyflowError.data for more" - - BATCH_INSERT_PARTIAL_SUCCESS = "Insert Operation is partially successful" - BATCH_INSERT_FAILURE = "Insert Operation is unsuccessful" - - INVALID_BYOT_TYPE = "byot option has value of type %s, expected Skyflow.BYOT" - NO_TOKENS_IN_INSERT = "Tokens are not passed in records for byot as %s" - TOKENS_PASSED_FOR_BYOT_DISABLE = "Pass byot parameter with ENABLE for token insertion" - INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT = "For byot as ENABLE_STRICT, tokens should be passed for all fields" - -class SkyflowError(Exception): - def __init__(self, code, message="An Error occured", data={}, interface: str = 'Unknown') -> None: - if type(code) is SkyflowErrorCodes: - self.code = code.value - else: - self.code = code - if type(message) is SkyflowErrorMessages: - self.message = message.value 
- else: - self.message = message - log_error(self.message, interface) - self.data = data - super().__init__(self.message) diff --git a/tests/service_account/data/empty.json b/skyflow/generated/__init__.py similarity index 100% rename from tests/service_account/data/empty.json rename to skyflow/generated/__init__.py diff --git a/skyflow/generated/rest/__init__.py b/skyflow/generated/rest/__init__.py new file mode 100644 index 00000000..1544b853 --- /dev/null +++ b/skyflow/generated/rest/__init__.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +# flake8: noqa + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +__version__ = "1.0.0" + +# import apis into sdk package +from skyflow.generated.rest.api.audit_api import AuditApi +from skyflow.generated.rest.api.bin_lookup_api import BINLookupApi +from skyflow.generated.rest.api.query_api import QueryApi +from skyflow.generated.rest.api.records_api import RecordsApi +from skyflow.generated.rest.api.tokens_api import TokensApi + +# import ApiClient +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.api_client import ApiClient +from skyflow.generated.rest.configuration import Configuration +from skyflow.generated.rest.exceptions import OpenApiException +from skyflow.generated.rest.exceptions import ApiTypeError +from skyflow.generated.rest.exceptions import ApiValueError +from skyflow.generated.rest.exceptions import ApiKeyError +from skyflow.generated.rest.exceptions import ApiAttributeError +from skyflow.generated.rest.exceptions import ApiException + +# import models into sdk package +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus +from skyflow.generated.rest.models.protobuf_any 
import ProtobufAny +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from skyflow.generated.rest.models.request_action_type import RequestActionType +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_card import V1Card +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import 
V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig diff --git a/skyflow/generated/rest/api/__init__.py b/skyflow/generated/rest/api/__init__.py new file mode 100644 index 00000000..01b15fdb --- /dev/null +++ b/skyflow/generated/rest/api/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +# import apis into api package +from skyflow.generated.rest.api.audit_api import AuditApi +from skyflow.generated.rest.api.bin_lookup_api import 
BINLookupApi +from skyflow.generated.rest.api.query_api import QueryApi +from skyflow.generated.rest.api.records_api import RecordsApi +from skyflow.generated.rest.api.tokens_api import TokensApi +from skyflow.generated.rest.api.authentication_api import AuthenticationApi diff --git a/skyflow/generated/rest/api/audit_api.py b/skyflow/generated/rest/api/audit_api.py new file mode 100644 index 00000000..dc6de1fe --- /dev/null +++ b/skyflow/generated/rest/api/audit_api.py @@ -0,0 +1,848 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictInt, StrictStr, field_validator +from typing import Optional +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_audit_response import V1AuditResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class AuditApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def audit_service_list_audit_events( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. 
Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". 
For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. 
For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1AuditResponse: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. + :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. 
+ :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. 
+ :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. + :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + 
filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def audit_service_list_audit_events_with_http_info( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. 
Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". 
For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. 
For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1AuditResponse]: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. + :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. 
+ :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. 
+ :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. + :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + 
filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def audit_service_list_audit_events_without_preload_content( + self, + filter_ops_account_id: Annotated[StrictStr, Field(description="Resources with the specified account ID.")], + filter_ops_context_change_id: Annotated[Optional[StrictStr], Field(description="ID for the audit event.")] = None, + filter_ops_context_request_id: Annotated[Optional[StrictStr], Field(description="ID for the request that caused the event.")] = None, + filter_ops_context_trace_id: Annotated[Optional[StrictStr], Field(description="ID for the request set by the service that received the request.")] = None, + filter_ops_context_session_id: Annotated[Optional[StrictStr], Field(description="ID for the session in which the request was sent.")] = None, + filter_ops_context_actor: Annotated[Optional[StrictStr], Field(description="Member who sent the request. 
Depending on `actorType`, this may be a user ID or a service account ID.")] = None, + filter_ops_context_actor_type: Annotated[Optional[StrictStr], Field(description="Type of member who sent the request.")] = None, + filter_ops_context_access_type: Annotated[Optional[StrictStr], Field(description="Type of access for the request.")] = None, + filter_ops_context_ip_address: Annotated[Optional[StrictStr], Field(description="IP Address of the client that made the request.")] = None, + filter_ops_context_origin: Annotated[Optional[StrictStr], Field(description="HTTP Origin request header (including scheme, hostname, and port) of the request.")] = None, + filter_ops_context_auth_mode: Annotated[Optional[StrictStr], Field(description="Authentication mode the `actor` used.")] = None, + filter_ops_context_jwt_id: Annotated[Optional[StrictStr], Field(description="ID of the JWT token.")] = None, + filter_ops_context_bearer_token_context_id: Annotated[Optional[StrictStr], Field(description="Embedded User Context.")] = None, + filter_ops_parent_account_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified parent account ID.")] = None, + filter_ops_workspace_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified workspace ID.")] = None, + filter_ops_vault_id: Annotated[Optional[StrictStr], Field(description="Resources with the specified vault ID.")] = None, + filter_ops_resource_ids: Annotated[Optional[StrictStr], Field(description="Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". 
For example, \"VAULT/12345, USER/67890\".")] = None, + filter_ops_action_type: Annotated[Optional[StrictStr], Field(description="Events with the specified action type.")] = None, + filter_ops_resource_type: Annotated[Optional[StrictStr], Field(description="Resources with the specified type.")] = None, + filter_ops_tags: Annotated[Optional[StrictStr], Field(description="Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\".")] = None, + filter_ops_response_code: Annotated[Optional[StrictInt], Field(description="HTTP response code of the request.")] = None, + filter_ops_start_time: Annotated[Optional[StrictStr], Field(description="Start timestamp for the query, in SQL format.")] = None, + filter_ops_end_time: Annotated[Optional[StrictStr], Field(description="End timestamp for the query, in SQL format.")] = None, + filter_ops_api_name: Annotated[Optional[StrictStr], Field(description="Name of the API called in the request.")] = None, + filter_ops_response_message: Annotated[Optional[StrictStr], Field(description="Response message of the request.")] = None, + filter_ops_http_method: Annotated[Optional[StrictStr], Field(description="HTTP method of the request.")] = None, + filter_ops_http_uri: Annotated[Optional[StrictStr], Field(description="HTTP URI of the request.")] = None, + sort_ops_sort_by: Annotated[Optional[StrictStr], Field(description="Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\").")] = None, + sort_ops_order_by: Annotated[Optional[StrictStr], Field(description="Ascending or descending ordering of results.")] = None, + after_ops_timestamp: Annotated[Optional[StrictStr], Field(description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. 
For the first request in a series of audit requests, leave blank.")] = None, + after_ops_change_id: Annotated[Optional[StrictStr], Field(description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.")] = None, + limit: Annotated[Optional[StrictInt], Field(description="Number of results to return.")] = None, + offset: Annotated[Optional[StrictInt], Field(description="Record position at which to start returning results.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Audit Events + + Lists audit events that match query parameters. + + :param filter_ops_account_id: Resources with the specified account ID. (required) + :type filter_ops_account_id: str + :param filter_ops_context_change_id: ID for the audit event. + :type filter_ops_context_change_id: str + :param filter_ops_context_request_id: ID for the request that caused the event. + :type filter_ops_context_request_id: str + :param filter_ops_context_trace_id: ID for the request set by the service that received the request. + :type filter_ops_context_trace_id: str + :param filter_ops_context_session_id: ID for the session in which the request was sent. + :type filter_ops_context_session_id: str + :param filter_ops_context_actor: Member who sent the request. Depending on `actorType`, this may be a user ID or a service account ID. + :type filter_ops_context_actor: str + :param filter_ops_context_actor_type: Type of member who sent the request. 
+ :type filter_ops_context_actor_type: str + :param filter_ops_context_access_type: Type of access for the request. + :type filter_ops_context_access_type: str + :param filter_ops_context_ip_address: IP Address of the client that made the request. + :type filter_ops_context_ip_address: str + :param filter_ops_context_origin: HTTP Origin request header (including scheme, hostname, and port) of the request. + :type filter_ops_context_origin: str + :param filter_ops_context_auth_mode: Authentication mode the `actor` used. + :type filter_ops_context_auth_mode: str + :param filter_ops_context_jwt_id: ID of the JWT token. + :type filter_ops_context_jwt_id: str + :param filter_ops_context_bearer_token_context_id: Embedded User Context. + :type filter_ops_context_bearer_token_context_id: str + :param filter_ops_parent_account_id: Resources with the specified parent account ID. + :type filter_ops_parent_account_id: str + :param filter_ops_workspace_id: Resources with the specified workspace ID. + :type filter_ops_workspace_id: str + :param filter_ops_vault_id: Resources with the specified vault ID. + :type filter_ops_vault_id: str + :param filter_ops_resource_ids: Resources with a specified ID. If a resource matches at least one ID, the associated event is returned. Format is a comma-separated list of \"\\/\\\". For example, \"VAULT/12345, USER/67890\". + :type filter_ops_resource_ids: str + :param filter_ops_action_type: Events with the specified action type. + :type filter_ops_action_type: str + :param filter_ops_resource_type: Resources with the specified type. + :type filter_ops_resource_type: str + :param filter_ops_tags: Events with associated tags. If an event matches at least one tag, the event is returned. Comma-separated list. For example, \"login, get\". + :type filter_ops_tags: str + :param filter_ops_response_code: HTTP response code of the request. 
+ :type filter_ops_response_code: int + :param filter_ops_start_time: Start timestamp for the query, in SQL format. + :type filter_ops_start_time: str + :param filter_ops_end_time: End timestamp for the query, in SQL format. + :type filter_ops_end_time: str + :param filter_ops_api_name: Name of the API called in the request. + :type filter_ops_api_name: str + :param filter_ops_response_message: Response message of the request. + :type filter_ops_response_message: str + :param filter_ops_http_method: HTTP method of the request. + :type filter_ops_http_method: str + :param filter_ops_http_uri: HTTP URI of the request. + :type filter_ops_http_uri: str + :param sort_ops_sort_by: Fully-qualified field by which to sort results. Field names should be in camel case (for example, \"capitalization.camelCase\"). + :type sort_ops_sort_by: str + :param sort_ops_order_by: Ascending or descending ordering of results. + :type sort_ops_order_by: str + :param after_ops_timestamp: Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_timestamp: str + :param after_ops_change_id: Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank. + :type after_ops_change_id: str + :param limit: Number of results to return. + :type limit: int + :param offset: Record position at which to start returning results. + :type offset: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._audit_service_list_audit_events_serialize( + filter_ops_account_id=filter_ops_account_id, + filter_ops_context_change_id=filter_ops_context_change_id, + filter_ops_context_request_id=filter_ops_context_request_id, + filter_ops_context_trace_id=filter_ops_context_trace_id, + filter_ops_context_session_id=filter_ops_context_session_id, + filter_ops_context_actor=filter_ops_context_actor, + filter_ops_context_actor_type=filter_ops_context_actor_type, + filter_ops_context_access_type=filter_ops_context_access_type, + filter_ops_context_ip_address=filter_ops_context_ip_address, + filter_ops_context_origin=filter_ops_context_origin, + filter_ops_context_auth_mode=filter_ops_context_auth_mode, + filter_ops_context_jwt_id=filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id=filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id=filter_ops_parent_account_id, + filter_ops_workspace_id=filter_ops_workspace_id, + filter_ops_vault_id=filter_ops_vault_id, + filter_ops_resource_ids=filter_ops_resource_ids, + filter_ops_action_type=filter_ops_action_type, + filter_ops_resource_type=filter_ops_resource_type, + filter_ops_tags=filter_ops_tags, + 
filter_ops_response_code=filter_ops_response_code, + filter_ops_start_time=filter_ops_start_time, + filter_ops_end_time=filter_ops_end_time, + filter_ops_api_name=filter_ops_api_name, + filter_ops_response_message=filter_ops_response_message, + filter_ops_http_method=filter_ops_http_method, + filter_ops_http_uri=filter_ops_http_uri, + sort_ops_sort_by=sort_ops_sort_by, + sort_ops_order_by=sort_ops_order_by, + after_ops_timestamp=after_ops_timestamp, + after_ops_change_id=after_ops_change_id, + limit=limit, + offset=offset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1AuditResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _audit_service_list_audit_events_serialize( + self, + filter_ops_account_id, + filter_ops_context_change_id, + filter_ops_context_request_id, + filter_ops_context_trace_id, + filter_ops_context_session_id, + filter_ops_context_actor, + filter_ops_context_actor_type, + filter_ops_context_access_type, + filter_ops_context_ip_address, + filter_ops_context_origin, + filter_ops_context_auth_mode, + filter_ops_context_jwt_id, + filter_ops_context_bearer_token_context_id, + filter_ops_parent_account_id, + filter_ops_workspace_id, + filter_ops_vault_id, + filter_ops_resource_ids, + filter_ops_action_type, + filter_ops_resource_type, + filter_ops_tags, + filter_ops_response_code, + filter_ops_start_time, + filter_ops_end_time, + filter_ops_api_name, + filter_ops_response_message, + filter_ops_http_method, + filter_ops_http_uri, + sort_ops_sort_by, + sort_ops_order_by, + after_ops_timestamp, + after_ops_change_id, + limit, + offset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: 
Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filter_ops_context_change_id is not None: + + _query_params.append(('filterOps.context.changeID', filter_ops_context_change_id)) + + if filter_ops_context_request_id is not None: + + _query_params.append(('filterOps.context.requestID', filter_ops_context_request_id)) + + if filter_ops_context_trace_id is not None: + + _query_params.append(('filterOps.context.traceID', filter_ops_context_trace_id)) + + if filter_ops_context_session_id is not None: + + _query_params.append(('filterOps.context.sessionID', filter_ops_context_session_id)) + + if filter_ops_context_actor is not None: + + _query_params.append(('filterOps.context.actor', filter_ops_context_actor)) + + if filter_ops_context_actor_type is not None: + + _query_params.append(('filterOps.context.actorType', filter_ops_context_actor_type)) + + if filter_ops_context_access_type is not None: + + _query_params.append(('filterOps.context.accessType', filter_ops_context_access_type)) + + if filter_ops_context_ip_address is not None: + + _query_params.append(('filterOps.context.ipAddress', filter_ops_context_ip_address)) + + if filter_ops_context_origin is not None: + + _query_params.append(('filterOps.context.origin', filter_ops_context_origin)) + + if filter_ops_context_auth_mode is not None: + + _query_params.append(('filterOps.context.authMode', filter_ops_context_auth_mode)) + + if filter_ops_context_jwt_id is not None: + + _query_params.append(('filterOps.context.jwtID', filter_ops_context_jwt_id)) + + if filter_ops_context_bearer_token_context_id is not None: + + _query_params.append(('filterOps.context.bearerTokenContextID', filter_ops_context_bearer_token_context_id)) + + if 
filter_ops_parent_account_id is not None: + + _query_params.append(('filterOps.parentAccountID', filter_ops_parent_account_id)) + + if filter_ops_account_id is not None: + + _query_params.append(('filterOps.accountID', filter_ops_account_id)) + + if filter_ops_workspace_id is not None: + + _query_params.append(('filterOps.workspaceID', filter_ops_workspace_id)) + + if filter_ops_vault_id is not None: + + _query_params.append(('filterOps.vaultID', filter_ops_vault_id)) + + if filter_ops_resource_ids is not None: + + _query_params.append(('filterOps.resourceIDs', filter_ops_resource_ids)) + + if filter_ops_action_type is not None: + + _query_params.append(('filterOps.actionType', filter_ops_action_type)) + + if filter_ops_resource_type is not None: + + _query_params.append(('filterOps.resourceType', filter_ops_resource_type)) + + if filter_ops_tags is not None: + + _query_params.append(('filterOps.tags', filter_ops_tags)) + + if filter_ops_response_code is not None: + + _query_params.append(('filterOps.responseCode', filter_ops_response_code)) + + if filter_ops_start_time is not None: + + _query_params.append(('filterOps.startTime', filter_ops_start_time)) + + if filter_ops_end_time is not None: + + _query_params.append(('filterOps.endTime', filter_ops_end_time)) + + if filter_ops_api_name is not None: + + _query_params.append(('filterOps.apiName', filter_ops_api_name)) + + if filter_ops_response_message is not None: + + _query_params.append(('filterOps.responseMessage', filter_ops_response_message)) + + if filter_ops_http_method is not None: + + _query_params.append(('filterOps.httpMethod', filter_ops_http_method)) + + if filter_ops_http_uri is not None: + + _query_params.append(('filterOps.httpURI', filter_ops_http_uri)) + + if sort_ops_sort_by is not None: + + _query_params.append(('sortOps.sortBy', sort_ops_sort_by)) + + if sort_ops_order_by is not None: + + _query_params.append(('sortOps.orderBy', sort_ops_order_by)) + + if after_ops_timestamp is not None: + + 
_query_params.append(('afterOps.timestamp', after_ops_timestamp)) + + if after_ops_change_id is not None: + + _query_params.append(('afterOps.changeID', after_ops_change_id)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if offset is not None: + + _query_params.append(('offset', offset)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/audit/events', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/authentication_api.py b/skyflow/generated/rest/api/authentication_api.py new file mode 100644 index 00000000..8abbbf67 --- /dev/null +++ b/skyflow/generated/rest/api/authentication_api.py @@ -0,0 +1,319 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
| Header | Value | Example |
|---|---|---|
| Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA |
| X-SKYFLOW-ACCOUNT-ID | Your Skyflow account ID. | X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef |
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest +from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class AuthenticationApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def authentication_service_get_auth_token( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetAuthTokenResponse: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def authentication_service_get_auth_token_with_http_info( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetAuthTokenResponse]: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def authentication_service_get_auth_token_without_preload_content( + self, + body: V1GetAuthTokenRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Bearer Token + +

Generates a Bearer Token to authenticate with Skyflow. This method doesn't require the Authorization header.

Note: For recommended ways to authenticate, see API authentication.

+ + :param body: (required) + :type body: V1GetAuthTokenRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._authentication_service_get_auth_token_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetAuthTokenResponse", + '400': "object", + '401': "object", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _authentication_service_get_auth_token_serialize( + self, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/auth/sa/oauth/token', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + 
post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/bin_lookup_api.py b/skyflow/generated/rest/api/bin_lookup_api.py new file mode 100644 index 00000000..1bb3e64b --- /dev/null +++ b/skyflow/generated/rest/api/bin_lookup_api.py @@ -0,0 +1,315 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
| Header | Value | Example |
|---|---|---|
| Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA |
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class BINLookupApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def b_in_list_service_list_cards_of_bin( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BINListResponse: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def b_in_list_service_list_cards_of_bin_with_http_info( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BINListResponse]: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def b_in_list_service_list_cards_of_bin_without_preload_content( + self, + body: Annotated[V1BINListRequest, Field(description="Request to return specific card metadata.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get BIN + + Note: This endpoint is in beta and subject to change.

Returns the specified card metadata. + + :param body: Request to return specific card metadata. (required) + :type body: V1BINListRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._b_in_list_service_list_cards_of_bin_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BINListResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _b_in_list_service_list_cards_of_bin_serialize( + self, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/card_lookup', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + 
auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/query_api.py b/skyflow/generated/rest/api/query_api.py new file mode 100644 index 00000000..edf04f27 --- /dev/null +++ b/skyflow/generated/rest/api/query_api.py @@ -0,0 +1,330 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class QueryApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def query_service_execute_query( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetQueryResponse: + """Execute Query + + Returns records for a valid SQL query. This endpoint
  • Can return redacted record values.
  • Supports only the SELECT command.
  • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
  • Can't modify the vault or perform transactions.
  • Can't return tokens.
  • Can't return file download or render URLs.
  • Doesn't support the WHERE keyword with columns using transient tokenization.
  • Doesn't support `?` conditional for columns with column-level encryption disabled.
    • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def query_service_execute_query_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetQueryResponse]: + """Execute Query + + Returns records for a valid SQL query. This endpoint
      • Can return redacted record values.
      • Supports only the SELECT command.
      • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
      • Can't modify the vault or perform transactions.
      • Can't return tokens.
      • Can't return file download or render URLs.
      • Doesn't support the WHERE keyword with columns using transient tokenization.
      • Doesn't support `?` conditional for columns with column-level encryption disabled.
        • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def query_service_execute_query_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: QueryServiceExecuteQueryBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Execute Query + + Returns records for a valid SQL query. This endpoint
          • Can return redacted record values.
          • Supports only the SELECT command.
          • Returns a maximum of 25 records. To return additional records, perform another query using the OFFSET keyword.
          • Can't modify the vault or perform transactions.
          • Can't return tokens.
          • Can't return file download or render URLs.
          • Doesn't support the WHERE keyword with columns using transient tokenization.
          • Doesn't support `?` conditional for columns with column-level encryption disabled.
            • + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: QueryServiceExecuteQueryBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._query_service_execute_query_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetQueryResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _query_service_execute_query_serialize( + self, + vault_id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/query', + path_params=_path_params, + query_params=_query_params, + 
header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/records_api.py b/skyflow/generated/rest/api/records_api.py new file mode 100644 index 00000000..ae9a2c29 --- /dev/null +++ b/skyflow/generated/rest/api/records_api.py @@ -0,0 +1,3310 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
              • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
              • Production: https://*identifier*.vault.skyflowapis.com
              When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictBytes, StrictStr, field_validator +from typing import List, Optional, Union +from typing_extensions import Annotated +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from 
skyflow.generated.rest.rest import RESTResponseType + + +class RecordsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def file_service_delete_file( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DeleteFileResponse: + """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_delete_file_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DeleteFileResponse]: 
+ """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_delete_file_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete File + + Deletes a file from the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_delete_file_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteFileResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_delete_file_serialize( + self, + vault_id, + table_name, + id, + column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if table_name is not None: + 
_path_params['tableName'] = table_name + if id is not None: + _path_params['ID'] = id + if column_name is not None: + _path_params['columnName'] = column_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def file_service_get_file_scan_status( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1GetFileScanStatusResponse: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. 
(required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_get_file_scan_status_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1GetFileScanStatusResponse]: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_get_file_scan_status_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + table_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + column_name: Annotated[StrictStr, Field(description="Name of the column that contains the file.")], + _request_timeout: Union[ + None, + 
Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get File Scan Status + + Returns the anti-virus scan status of a file. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param table_name: Name of the table. (required) + :type table_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param column_name: Name of the column that contains the file. (required) + :type column_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_get_file_scan_status_serialize( + vault_id=vault_id, + table_name=table_name, + id=id, + column_name=column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1GetFileScanStatusResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_get_file_scan_status_serialize( + self, + vault_id, + table_name, + id, + column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if table_name is not None: + _path_params['tableName'] = table_name + if id is not None: + _path_params['ID'] = id + if column_name is not None: + _path_params['columnName'] = column_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{tableName}/{ID}/files/{columnName}/scan-status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + 
post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def file_service_upload_file( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1UpdateRecordResponse: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. + :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def file_service_upload_file_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. 
The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1UpdateRecordResponse]: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. + :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def file_service_upload_file_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + file_column_name: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="Name of the column to store the file in. The column must have a file data type.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Upload File + + Uploads a file to the specified record. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param file_column_name: Name of the column to store the file in. The column must have a file data type. 
+ :type file_column_name: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._file_service_upload_file_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + file_column_name=file_column_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _file_service_upload_file_serialize( + self, + vault_id, + object_name, + id, + file_column_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + 
_body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + if file_column_name is not None: + _files['fileColumnName'] = file_column_name + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}/files', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_batch_operation( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, 
Field(ge=0, le=0)] = 0, + ) -> V1BatchOperationResponse: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '207': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_batch_operation_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BatchOperationResponse]: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_batch_operation_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + body: RecordServiceBatchOperationBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Batch Operation + + Performs multiple record operations in a single transaction. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param body: (required) + :type body: RecordServiceBatchOperationBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_batch_operation_serialize( + vault_id=vault_id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BatchOperationResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_batch_operation_serialize( + self, + vault_id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # 
process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_bulk_delete_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BulkDeleteRecordResponse: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. 
(required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_bulk_delete_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BulkDeleteRecordResponse]: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_bulk_delete_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceBulkDeleteRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Bulk Delete Records + + Deletes the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceBulkDeleteRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkDeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_bulk_delete_record_serialize( + self, + vault_id, + object_name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + 
method='DELETE', + resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_bulk_get_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1BulkGetRecordResponse: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of record to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_bulk_get_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1BulkGetRecordResponse]: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of record to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_bulk_get_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table that contains the records.")], + skyflow_ids: Annotated[Optional[List[StrictStr]], Field(description="`skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table.")] = None, + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned records. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + offset: Annotated[Optional[StrictStr], Field(description="Record position at which to start receiving data.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="Number of record to return. Maximum 25.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + column_name: Annotated[Optional[StrictStr], Field(description="Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + column_values: Annotated[Optional[List[StrictStr]], Field(description="Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error.")] = None, + order_by: Annotated[Optional[StrictStr], Field(description="Order to return records, based on `skyflow_id` values. To disable, set to `NONE`.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Record(s) + + Gets the specified records from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table that contains the records. (required) + :type object_name: str + :param skyflow_ids: `skyflow_id` values of the records to return, with one value per `skyflow_ids` URL parameter. For example, `?skyflow_ids=abc&skyflow_ids=123`.

If not specified, returns the first 25 records in the table. + :type skyflow_ids: List[str] + :param redaction: Redaction level to enforce for the returned records. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param offset: Record position at which to start receiving data. + :type offset: str + :param limit: Number of record to return. Maximum 25. + :type limit: str + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param column_name: Name of the column. It must be configured as unique in the schema. If you provide both column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_name: str + :param column_values: Column values of the records to return, with one value per `column_values` URL parameter. For example, `?column_values=abc&column_values=123`.

`column_name` is mandatory when providing `column_values`. If you use column name or column value, you cannot use `skyflow_ids`. Passing either of these parameters with `skyflow_ids` returns an error. + :type column_values: List[str] + :param order_by: Order to return records, based on `skyflow_id` values. To disable, set to `NONE`. + :type order_by: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_bulk_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + skyflow_ids=skyflow_ids, + redaction=redaction, + tokenization=tokenization, + fields=fields, + offset=offset, + limit=limit, + download_url=download_url, + column_name=column_name, + column_values=column_values, + order_by=order_by, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1BulkGetRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_bulk_get_record_serialize( + self, + vault_id, + object_name, + skyflow_ids, + redaction, + tokenization, + fields, + offset, + limit, + download_url, + column_name, + column_values, + order_by, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'skyflow_ids': 'multi', + 'fields': 'multi', + 'column_values': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + if skyflow_ids is not None: + + _query_params.append(('skyflow_ids', skyflow_ids)) + + if redaction is not None: + + _query_params.append(('redaction', redaction)) + + if tokenization is not None: + + _query_params.append(('tokenization', tokenization)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + if offset is not None: + + 
_query_params.append(('offset', offset)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if download_url is not None: + + _query_params.append(('downloadURL', download_url)) + + if column_name is not None: + + _query_params.append(('column_name', column_name)) + + if column_values is not None: + + _query_params.append(('column_values', column_values)) + + if order_by is not None: + + _query_params.append(('order_by', order_by)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_delete_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DeleteRecordResponse: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_delete_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DeleteRecordResponse]: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_delete_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record to delete.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Record + + Deletes the specified record from a table.

Note: This method doesn't delete transient field tokens. Transient field values are available until they expire based on the fields' time-to-live (TTL) setting. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record to delete. (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_delete_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DeleteRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_delete_record_serialize( + self, + vault_id, + object_name, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + 
+ + + + @validate_call + def record_service_get_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1FieldRecords: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_get_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1FieldRecords]: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_get_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + redaction: Annotated[Optional[StrictStr], Field(description="Redaction level to enforce for the returned record. Subject to policies assigned to the API caller.")] = None, + tokenization: Annotated[Optional[StrictBool], Field(description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.")] = None, + fields: Annotated[Optional[List[StrictStr]], Field(description="Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields.")] = None, + download_url: Annotated[Optional[StrictBool], Field(description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Record By ID + + Returns the specified record from a table. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param redaction: Redaction level to enforce for the returned record. Subject to policies assigned to the API caller. + :type redaction: str + :param tokenization: If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified. + :type tokenization: bool + :param fields: Fields to return for the record, with one value per `fields` URL parameter. For example, `?fields=abc&fields=123`.

If not specified, returns all fields. + :type fields: List[str] + :param download_url: If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean. + :type download_url: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_get_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + redaction=redaction, + tokenization=tokenization, + fields=fields, + download_url=download_url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1FieldRecords", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_get_record_serialize( + self, + vault_id, + object_name, + id, + redaction, + tokenization, + fields, + download_url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'fields': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + if redaction is not None: + + _query_params.append(('redaction', redaction)) + + if tokenization is not None: + + _query_params.append(('tokenization', tokenization)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + if download_url is not None: + + _query_params.append(('downloadURL', download_url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 
'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_insert_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1InsertRecordResponse: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_insert_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1InsertRecordResponse]: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_insert_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + body: RecordServiceInsertRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Insert Records + + Inserts a record in the specified table.

The time-to-live (TTL) for a transient field begins when the field value is set during record insertion.

Columns that have a string data type and a uniqueness constraint accept strings up to 2500 characters. If an inserted string exceeds 2500 characters, the call returns a token insertion error. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param body: (required) + :type body: RecordServiceInsertRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_insert_record_serialize( + vault_id=vault_id, + object_name=object_name, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1InsertRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_insert_record_serialize( + self, + vault_id, + object_name, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + 
resource_path='/v1/vaults/{vaultID}/{objectName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_update_record( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1UpdateRecordResponse: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_update_record_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1UpdateRecordResponse]: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_update_record_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + object_name: Annotated[StrictStr, Field(description="Name of the table.")], + id: Annotated[StrictStr, Field(description="`skyflow_id` of the record.")], + body: RecordServiceUpdateRecordBody, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update Record + + Updates the specified record in a table.

When you update a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.

The time-to-live (TTL) for a transient field resets when the field value is updated. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param object_name: Name of the table. (required) + :type object_name: str + :param id: `skyflow_id` of the record. (required) + :type id: str + :param body: (required) + :type body: RecordServiceUpdateRecordBody + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_update_record_serialize( + vault_id=vault_id, + object_name=object_name, + id=id, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1UpdateRecordResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_update_record_serialize( + self, + vault_id, + object_name, + id, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + if object_name is not None: + _path_params['objectName'] = object_name + if id is not None: + _path_params['ID'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return 
self.api_client.param_serialize( + method='PUT', + resource_path='/v1/vaults/{vaultID}/{objectName}/{ID}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api/tokens_api.py b/skyflow/generated/rest/api/tokens_api.py new file mode 100644 index 00000000..e21e7935 --- /dev/null +++ b/skyflow/generated/rest/api/tokens_api.py @@ -0,0 +1,623 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse + +from skyflow.generated.rest.api_client import ApiClient, RequestSerialized +from skyflow.generated.rest.api_response import ApiResponse +from skyflow.generated.rest.rest import RESTResponseType + + +class TokensApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def record_service_detokenize( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1DetokenizeResponse: + """Detokenize + + Returns records that correspond to the specified tokens. 
+ + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '207': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_detokenize_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1DetokenizeResponse]: + """Detokenize + + Returns records that correspond to the specified tokens. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_detokenize_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + detokenize_payload: V1DetokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Detokenize + + Returns records that correspond to the specified tokens. + + :param vault_id: ID of the vault. 
(required) + :type vault_id: str + :param detokenize_payload: (required) + :type detokenize_payload: V1DetokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_detokenize_serialize( + vault_id=vault_id, + detokenize_payload=detokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1DetokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_detokenize_serialize( + self, + vault_id, + detokenize_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if detokenize_payload is not None: + _body_params = detokenize_payload + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/detokenize', + 
path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def record_service_tokenize( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1TokenizeResponse: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def record_service_tokenize_with_http_info( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1TokenizeResponse]: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def record_service_tokenize_without_preload_content( + self, + vault_id: Annotated[StrictStr, Field(description="ID of the vault.")], + tokenize_payload: V1TokenizePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Tokenize + + Returns tokens that correspond to the specified records. Only applicable for fields with deterministic tokenization.

Note: This endpoint doesn't insert records—it returns tokens for existing values. To insert records and tokenize that new record's values, see Insert Record and the tokenization parameter. + + :param vault_id: ID of the vault. (required) + :type vault_id: str + :param tokenize_payload: (required) + :type tokenize_payload: V1TokenizePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._record_service_tokenize_serialize( + vault_id=vault_id, + tokenize_payload=tokenize_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "V1TokenizeResponse", + '404': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _record_service_tokenize_serialize( + self, + vault_id, + tokenize_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if vault_id is not None: + _path_params['vaultID'] = vault_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if tokenize_payload is not None: + _body_params = tokenize_payload + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'Bearer' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/v1/vaults/{vaultID}/tokenize', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/skyflow/generated/rest/api_client.py b/skyflow/generated/rest/api_client.py new file mode 100644 index 00000000..8aa5e6a9 --- /dev/null +++ b/skyflow/generated/rest/api_client.py @@ -0,0 +1,789 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import datetime +from dateutil.parser import parse +from enum import Enum +import decimal +import json +import mimetypes +import os +import re +import tempfile + +from urllib.parse import quote +from typing import Tuple, Optional, List, Dict, Union +from pydantic import SecretStr + +from skyflow.generated.rest.configuration import Configuration +from skyflow.generated.rest.api_response import ApiResponse, T as ApiResponseT +import skyflow.generated.rest.models +from skyflow.generated.rest import rest +from skyflow.generated.rest.exceptions import ( + ApiValueError, + ApiException, + BadRequestException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException +) + +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] + +class ApiClient: + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, # TODO remove as only py3 is supported? 
+ 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'decimal': decimal.Decimal, + 'object': object, + } + _pool = None + + def __init__( + self, + configuration=None, + header_name=None, + header_value=None, + cookie=None + ) -> None: + # use default configuration if none is provided + if configuration is None: + configuration = Configuration.get_default() + self.configuration = configuration + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. 
+ """ + cls._default = default + + def param_serialize( + self, + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None + ) -> RequestSerialized: + + """Builds the HTTP request params needed by the request. + :param method: Method to call. + :param resource_path: Path to method endpoint. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. 
+ :return: tuple of form (path, http_method, query_params, header_params, + body, post_params, files) + """ + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict( + self.parameters_to_tuples(header_params,collection_formats) + ) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples( + path_params, + collection_formats + ) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples( + post_params, + collection_formats + ) + if files: + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth( + header_params, + query_params, + auth_settings, + resource_path, + method, + body, + request_auth=_request_auth + ) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None or self.configuration.ignore_operation_servers: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + url_query = self.parameters_to_url_query( + query_params, + collection_formats + ) + url += "?" 
+ url_query + + return method, url, header_params, body, post_params + + + def call_api( + self, + method, + url, + header_params=None, + body=None, + post_params=None, + _request_timeout=None + ) -> rest.RESTResponse: + """Makes the HTTP request (synchronous) + :param method: Method to call. + :param url: Path to method endpoint. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param _request_timeout: timeout setting for this request. + :return: RESTResponse + """ + + try: + # perform request and return response + response_data = self.rest_client.request( + method, url, + headers=header_params, + body=body, post_params=post_params, + _request_timeout=_request_timeout + ) + + except ApiException as e: + raise e + + return response_data + + def response_deserialize( + self, + response_data: rest.RESTResponse, + response_types_map: Optional[Dict[str, ApiResponseT]]=None + ) -> ApiResponse[ApiResponseT]: + """Deserializes response into an object. + :param response_data: RESTResponse object to be deserialized. + :param response_types_map: dict of response types. + :return: ApiResponse + """ + + msg = "RESTResponse.read() must be called before passing it to response_deserialize()" + assert response_data.data is not None, msg + + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. 
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) + + # deserialize response data + response_text = None + return_data = None + try: + if response_type == "bytearray": + return_data = response_data.data + elif response_type == "file": + return_data = self.__deserialize_file(response_data) + elif response_type is not None: + match = None + content_type = response_data.getheader('content-type') + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_text = response_data.data.decode(encoding) + return_data = self.deserialize(response_text, response_type, content_type) + finally: + if not 200 <= response_data.status <= 299: + raise ApiException.from_response( + http_resp=response_data, + body=response_text, + data=return_data, + ) + + return ApiResponse( + status_code = response_data.status, + data = return_data, + headers = response_data.getheaders(), + raw_data = response_data.data + ) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. + If obj is SecretStr, return obj.get_secret_value() + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is decimal.Decimal return string representation. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. 
+ """ + if obj is None: + return None + elif isinstance(obj, Enum): + return obj.value + elif isinstance(obj, SecretStr): + return obj.get_secret_value() + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [ + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ] + elif isinstance(obj, tuple): + return tuple( + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + elif isinstance(obj, decimal.Decimal): + return str(obj) + + elif isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')): + obj_dict = obj.to_dict() + else: + obj_dict = obj.__dict__ + + return { + key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items() + } + + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + :param content_type: content type of response. + + :return: deserialized object. 
+ """ + + # fetch data from response object + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif content_type.startswith("application/json"): + if response_text == "": + data = "" + else: + data = json.loads(response_text) + elif content_type.startswith("text/plain"): + data = response_text + else: + raise ApiException( + status=0, + reason="Unsupported content type: {0}".format(content_type) + ) + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith('List['): + m = re.match(r'List\[(.*)]', klass) + assert m is not None, "Malformed List type definition" + sub_kls = m.group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('Dict['): + m = re.match(r'Dict\[([^,]*), (.*)]', klass) + assert m is not None, "Malformed Dict type definition" + sub_kls = m.group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(skyflow.generated.rest.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + elif klass == decimal.Decimal: + return decimal.Decimal(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) + else: + return self.__deserialize_model(data, klass) + + def parameters_to_tuples(self, params, collection_formats): + """Get 
parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. 
a=Hello%20World&b=123) + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if isinstance(v, bool): + v = str(v).lower() + if isinstance(v, (int, float)): + v = str(v) + if isinstance(v, dict): + v = json.dumps(v) + + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, str(value)) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(quote(str(value)) for value in v)) + ) + else: + new_params.append((k, quote(str(v)))) + + return "&".join(["=".join(map(str, item)) for item in new_params]) + + def files_parameters(self, files: Dict[str, Union[str, bytes]]): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + for k, v in files.items(): + if isinstance(v, str): + with open(v, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + elif isinstance(v, bytes): + filename = k + filedata = v + else: + raise ValueError("Unsupported file value") + mimetype = ( + mimetypes.guess_type(filename)[0] + or 'application/octet-stream' + ) + params.append( + tuple([k, tuple([filename, filedata, mimetype])]) + ) + return params + + def select_header_accept(self, accepts: List[str]) -> Optional[str]: + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). 
+ """ + if not accepts: + return None + + for accept in accepts: + if re.search('json', accept, re.IGNORECASE): + return accept + + return accepts[0] + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return None + + for content_type in content_types: + if re.search('json', content_type, re.IGNORECASE): + return content_type + + return content_types[0] + + def update_params_for_auth( + self, + headers, + queries, + auth_settings, + resource_path, + method, + body, + request_auth=None + ) -> None: + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param request_auth: if set, the provided settings will + override the token in the configuration. + """ + if not auth_settings: + return + + if request_auth: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + request_auth + ) + else: + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + auth_setting + ) + + def _apply_auth_params( + self, + headers, + queries, + resource_path, + method, + body, + auth_setting + ) -> None: + """Updates the request parameters based on a single auth_setting + + :param headers: Header parameters dict to be updated. 
+ :param queries: Query parameters tuple list to be updated. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param auth_setting: auth settings for the endpoint + """ + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + queries.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + handle file downloading + save response body into a tmp file and return the instance + + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + m = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition + ) + assert m is not None, "Unexpected 'content-disposition' header value" + filename = m.group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. 
+ """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. + + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as `{1}`" + .format(data, klass) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. 
+ """ + + return klass.from_dict(data) diff --git a/skyflow/generated/rest/api_response.py b/skyflow/generated/rest/api_response.py new file mode 100644 index 00000000..9bc7c11f --- /dev/null +++ b/skyflow/generated/rest/api_response.py @@ -0,0 +1,21 @@ +"""API response object.""" + +from __future__ import annotations +from typing import Optional, Generic, Mapping, TypeVar +from pydantic import Field, StrictInt, StrictBytes, BaseModel + +T = TypeVar("T") + +class ApiResponse(BaseModel, Generic[T]): + """ + API response object + """ + + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") + + model_config = { + "arbitrary_types_allowed": True + } diff --git a/skyflow/generated/rest/configuration.py b/skyflow/generated/rest/configuration.py new file mode 100644 index 00000000..5d983650 --- /dev/null +++ b/skyflow/generated/rest/configuration.py @@ -0,0 +1,464 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import copy +import logging +from logging import FileHandler +import multiprocessing +import sys +from typing import Optional +import urllib3 + +import http.client as httplib + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', + 'minimum', 'exclusiveMinimum', 'maxLength', + 'minLength', 'pattern', 'maxItems', 'minItems' +} + +class Configuration: + """This class contains various settings of the API client. + + :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. + Config will use `host` as the base url regardless of the operation servers. + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer). + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication. + :param password: Password for HTTP basic authentication. + :param access_token: Access token. + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. 
+ The validation of enums is performed for variables with defined enum + values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format. + :param retries: Number of retries for API requests. + + :Example: + """ + + _default = None + + def __init__(self, host=None, + api_key=None, api_key_prefix=None, + username=None, password=None, + access_token=None, + server_index=None, server_variables=None, + server_operation_index=None, server_operation_variables=None, + ignore_operation_servers=False, + ssl_ca_cert=None, + retries=None, + *, + debug: Optional[bool] = None + ) -> None: + """Constructor + """ + self._base_path = "https://identifier.vault.skyflowapis.com" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. 
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.access_token = access_token + """Access token + """ + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("skyflow.generated.rest") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler: Optional[FileHandler] = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + if debug is not None: + self.debug = debug + else: + self.__debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = ssl_ca_cert + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + self.tls_server_name = None + """SSL/TLS Server Name Indication (SNI) + Set this to the SNI value expected by the server. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. 
+ """ + + self.proxy: Optional[str] = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = retries + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + self.socket_options = None + """Options to pass down to the underlying urllib3 socket + """ + + self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" + """datetime format + """ + + self.date_format = "%Y-%m-%d" + """date format + """ + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = default + + @classmethod + def get_default_copy(cls): + """Deprecated. Please use `get_default` instead. + + Deprecated. Please use `get_default` instead. + + :return: The configuration object. + """ + return cls.get_default() + + @classmethod + def get_default(cls): + """Return the default configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration. + + :return: The configuration object. + """ + if cls._default is None: + cls._default = Configuration() + return cls._default + + @property + def logger_file(self): + """The logger file. 
+ + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. 
+ + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + if self.access_token is not None: + auth['Bearer'] = { + 'type': 'bearer', + 'in': 'header', + 'format': 'JWT', + 'key': 'Authorization', + 'value': 'Bearer ' + self.access_token + } + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. 
+ """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: v1\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "https://identifier.vault.skyflowapis.com", + 'description': "Production", + }, + { + 'url': "https://identifier.vault.skyflowapis-preview.com", + 'description': "Sandbox", + } + ] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server.get('variables', {}).items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. 
Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None diff --git a/skyflow/generated/rest/exceptions.py b/skyflow/generated/rest/exceptions.py new file mode 100644 index 00000000..ef323e2e --- /dev/null +++ b/skyflow/generated/rest/exceptions.py @@ -0,0 +1,200 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from typing import Any, Optional +from typing_extensions import Self + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None) -> None: + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiAttributeError(OpenApiException, AttributeError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Raised when an attribute reference or assignment fails. 
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + + if http_resp: + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode('utf-8') + except Exception: + pass + self.headers = http_resp.getheaders() + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + 
raise ApiException(http_resp=http_resp, body=body, data=data) + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) + + return error_message + + +class BadRequestException(ApiException): + pass + + +class NotFoundException(ApiException): + pass + + +class UnauthorizedException(ApiException): + pass + + +class ForbiddenException(ApiException): + pass + + +class ServiceException(ApiException): + pass + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/skyflow/generated/rest/models/__init__.py b/skyflow/generated/rest/models/__init__.py new file mode 100644 index 00000000..379cf733 --- /dev/null +++ b/skyflow/generated/rest/models/__init__.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +# flake8: noqa +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +# import models into model package +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from skyflow.generated.rest.models.googlerpc_status import GooglerpcStatus +from skyflow.generated.rest.models.protobuf_any import ProtobufAny +from skyflow.generated.rest.models.query_service_execute_query_body import QueryServiceExecuteQueryBody +from skyflow.generated.rest.models.record_service_batch_operation_body import RecordServiceBatchOperationBody +from skyflow.generated.rest.models.record_service_bulk_delete_record_body import RecordServiceBulkDeleteRecordBody +from skyflow.generated.rest.models.record_service_insert_record_body import RecordServiceInsertRecordBody +from skyflow.generated.rest.models.record_service_update_record_body import RecordServiceUpdateRecordBody +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from skyflow.generated.rest.models.request_action_type import RequestActionType +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from 
skyflow.generated.rest.models.v1_audit_response import V1AuditResponse +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from skyflow.generated.rest.models.v1_bin_list_request import V1BINListRequest +from skyflow.generated.rest.models.v1_bin_list_response import V1BINListResponse +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_batch_operation_response import V1BatchOperationResponse +from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord +from skyflow.generated.rest.models.v1_bulk_delete_record_response import V1BulkDeleteRecordResponse +from skyflow.generated.rest.models.v1_bulk_get_record_response import V1BulkGetRecordResponse +from skyflow.generated.rest.models.v1_card import V1Card +from skyflow.generated.rest.models.v1_delete_file_response import V1DeleteFileResponse +from skyflow.generated.rest.models.v1_delete_record_response import V1DeleteRecordResponse +from skyflow.generated.rest.models.v1_detokenize_payload import V1DetokenizePayload +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from skyflow.generated.rest.models.v1_detokenize_response import V1DetokenizeResponse +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from skyflow.generated.rest.models.v1_get_file_scan_status_response import V1GetFileScanStatusResponse +from skyflow.generated.rest.models.v1_get_query_response import V1GetQueryResponse +from skyflow.generated.rest.models.v1_insert_record_response import V1InsertRecordResponse +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from 
skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from skyflow.generated.rest.models.v1_tokenize_payload import V1TokenizePayload +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from skyflow.generated.rest.models.v1_tokenize_response import V1TokenizeResponse +from skyflow.generated.rest.models.v1_update_record_response import V1UpdateRecordResponse +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig + +from skyflow.generated.rest.models.v1_get_auth_token_request import V1GetAuthTokenRequest +from skyflow.generated.rest.models.v1_get_auth_token_response import V1GetAuthTokenResponse \ No newline at end of file diff --git a/skyflow/generated/rest/models/audit_event_audit_resource_type.py b/skyflow/generated/rest/models/audit_event_audit_resource_type.py new file mode 100644 index 00000000..c425dce7 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_audit_resource_type.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class AuditEventAuditResourceType(str, Enum): + """ + Type of the resource. + """ + + """ + allowed enum values + """ + NONE_API = 'NONE_API' + ACCOUNT = 'ACCOUNT' + AUDIT = 'AUDIT' + BASE_DATA_TYPE = 'BASE_DATA_TYPE' + FIELD_TEMPLATE = 'FIELD_TEMPLATE' + FILE = 'FILE' + KEY = 'KEY' + POLICY = 'POLICY' + PROTO_PARSE = 'PROTO_PARSE' + RECORD = 'RECORD' + ROLE = 'ROLE' + RULE = 'RULE' + SECRET = 'SECRET' + SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' + TOKEN = 'TOKEN' + USER = 'USER' + VAULT = 'VAULT' + VAULT_TEMPLATE = 'VAULT_TEMPLATE' + WORKSPACE = 'WORKSPACE' + TABLE = 'TABLE' + POLICY_TEMPLATE = 'POLICY_TEMPLATE' + MEMBER = 'MEMBER' + TAG = 'TAG' + CONNECTION = 'CONNECTION' + MIGRATION = 'MIGRATION' + SCHEDULED_JOB = 'SCHEDULED_JOB' + JOB = 'JOB' + COLUMN_NAME = 'COLUMN_NAME' + NETWORK_TOKEN = 'NETWORK_TOKEN' + SUBSCRIPTION = 'SUBSCRIPTION' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of AuditEventAuditResourceType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/audit_event_context.py b/skyflow/generated/rest/models/audit_event_context.py new file mode 100644 index 00000000..af280eb0 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_context.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.context_access_type import ContextAccessType +from skyflow.generated.rest.models.context_auth_mode import ContextAuthMode +from skyflow.generated.rest.models.v1_member_type import V1MemberType +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventContext(BaseModel): + """ + Context for an audit event. + """ # noqa: E501 + change_id: Optional[StrictStr] = Field(default=None, description="ID for the audit event.", alias="changeID") + request_id: Optional[StrictStr] = Field(default=None, description="ID for the request that caused the event.", alias="requestID") + trace_id: Optional[StrictStr] = Field(default=None, description="ID for the request set by the service that received the request.", alias="traceID") + session_id: Optional[StrictStr] = Field(default=None, description="ID for the session in which the request was sent.", alias="sessionID") + actor: Optional[StrictStr] = Field(default=None, description="Member who sent the request. 
Depending on `actorType`, this may be a user ID or a service account ID.") + actor_type: Optional[V1MemberType] = Field(default=V1MemberType.NONE, alias="actorType") + access_type: Optional[ContextAccessType] = Field(default=ContextAccessType.ACCESS_NONE, alias="accessType") + ip_address: Optional[StrictStr] = Field(default=None, description="IP Address of the client that made the request.", alias="ipAddress") + origin: Optional[StrictStr] = Field(default=None, description="HTTP Origin request header (including scheme, hostname, and port) of the request.") + auth_mode: Optional[ContextAuthMode] = Field(default=ContextAuthMode.AUTH_NONE, alias="authMode") + jwt_id: Optional[StrictStr] = Field(default=None, description="ID of the JWT token.", alias="jwtID") + bearer_token_context_id: Optional[StrictStr] = Field(default=None, description="Embedded User Context.", alias="bearerTokenContextID") + __properties: ClassVar[List[str]] = ["changeID", "requestID", "traceID", "sessionID", "actor", "actorType", "accessType", "ipAddress", "origin", "authMode", "jwtID", "bearerTokenContextID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventContext from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventContext from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "changeID": obj.get("changeID"), + "requestID": obj.get("requestID"), + "traceID": obj.get("traceID"), + "sessionID": obj.get("sessionID"), + "actor": obj.get("actor"), + "actorType": obj.get("actorType") if obj.get("actorType") is not None else V1MemberType.NONE, + "accessType": obj.get("accessType") if obj.get("accessType") is not None else ContextAccessType.ACCESS_NONE, + "ipAddress": obj.get("ipAddress"), + "origin": obj.get("origin"), + "authMode": obj.get("authMode") if obj.get("authMode") is not None else ContextAuthMode.AUTH_NONE, + "jwtID": obj.get("jwtID"), + "bearerTokenContextID": obj.get("bearerTokenContextID") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/audit_event_data.py b/skyflow/generated/rest/models/audit_event_data.py new file mode 100644 index 00000000..5a463f00 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_data.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventData(BaseModel): + """ + Any Sensitive data that needs to be wrapped. + """ # noqa: E501 + content: Optional[StrictStr] = Field(default=None, description="The entire body of the data requested or the query fired.") + __properties: ClassVar[List[str]] = ["content"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "content": obj.get("content") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/audit_event_http_info.py b/skyflow/generated/rest/models/audit_event_http_info.py new file mode 100644 index 00000000..b3b2f074 --- /dev/null +++ b/skyflow/generated/rest/models/audit_event_http_info.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AuditEventHTTPInfo(BaseModel): + """ + AuditEventHTTPInfo + """ # noqa: E501 + uri: Optional[StrictStr] = Field(default=None, description="The http URI that is used.", alias="URI") + method: Optional[StrictStr] = Field(default=None, description="http method used.") + __properties: ClassVar[List[str]] = ["URI", "method"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuditEventHTTPInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuditEventHTTPInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "URI": obj.get("URI"), + "method": obj.get("method") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/batch_record_method.py b/skyflow/generated/rest/models/batch_record_method.py new file mode 100644 index 00000000..a2892049 --- /dev/null +++ b/skyflow/generated/rest/models/batch_record_method.py @@ -0,0 +1,41 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class BatchRecordMethod(str, Enum): + """ + Method of the operation. + """ + + """ + allowed enum values + """ + NONE = 'NONE' + POST = 'POST' + PUT = 'PUT' + GET = 'GET' + DELETE = 'DELETE' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of BatchRecordMethod from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/context_access_type.py b/skyflow/generated/rest/models/context_access_type.py new file mode 100644 index 00000000..e00a9df9 --- /dev/null +++ b/skyflow/generated/rest/models/context_access_type.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ContextAccessType(str, Enum): + """ + Type of access for the request. + """ + + """ + allowed enum values + """ + ACCESS_NONE = 'ACCESS_NONE' + API = 'API' + SQL = 'SQL' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ContextAccessType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/context_auth_mode.py b/skyflow/generated/rest/models/context_auth_mode.py new file mode 100644 index 00000000..fb803e7a --- /dev/null +++ b/skyflow/generated/rest/models/context_auth_mode.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ContextAuthMode(str, Enum): + """ + Authentication mode the `actor` used. + """ + + """ + allowed enum values + """ + AUTH_NONE = 'AUTH_NONE' + OKTA_JWT = 'OKTA_JWT' + SERVICE_ACCOUNT_JWT = 'SERVICE_ACCOUNT_JWT' + PAT_JWT = 'PAT_JWT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ContextAuthMode from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/detokenize_record_response_value_type.py b/skyflow/generated/rest/models/detokenize_record_response_value_type.py new file mode 100644 index 00000000..62460141 --- /dev/null +++ b/skyflow/generated/rest/models/detokenize_record_response_value_type.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class DetokenizeRecordResponseValueType(str, Enum): + """ + DetokenizeRecordResponseValueType + """ + + """ + allowed enum values + """ + NONE = 'NONE' + STRING = 'STRING' + INTEGER = 'INTEGER' + FLOAT = 'FLOAT' + BOOL = 'BOOL' + DATETIME = 'DATETIME' + JSON = 'JSON' + ARRAY = 'ARRAY' + DATE = 'DATE' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of DetokenizeRecordResponseValueType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/googlerpc_status.py b/skyflow/generated/rest/models/googlerpc_status.py new file mode 100644 index 00000000..b9914c58 --- /dev/null +++ b/skyflow/generated/rest/models/googlerpc_status.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.protobuf_any import ProtobufAny +from typing import Optional, Set +from typing_extensions import Self + +class GooglerpcStatus(BaseModel): + """ + GooglerpcStatus + """ # noqa: E501 + code: Optional[StrictInt] = None + message: Optional[StrictStr] = None + details: Optional[List[ProtobufAny]] = None + __properties: ClassVar[List[str]] = ["code", "message", "details"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GooglerpcStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in details (list) + _items = [] + if self.details: + for _item_details in self.details: + if _item_details: + _items.append(_item_details.to_dict()) + _dict['details'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GooglerpcStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "code": obj.get("code"), + "message": obj.get("message"), + "details": [ProtobufAny.from_dict(_item) for _item in obj["details"]] if obj.get("details") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/protobuf_any.py b/skyflow/generated/rest/models/protobuf_any.py new file mode 100644 index 00000000..e29a6356 --- /dev/null +++ b/skyflow/generated/rest/models/protobuf_any.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ProtobufAny(BaseModel): + """ + ProtobufAny + """ # noqa: E501 + type: Optional[StrictStr] = Field(default=None, alias="@type") + additional_properties: Dict[str, Any] = {} + __properties: ClassVar[List[str]] = ["@type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ProtobufAny from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + * Fields in `self.additional_properties` are added to the output dict. 
+ """ + excluded_fields: Set[str] = set([ + "additional_properties", + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # puts key-value pairs in additional_properties in the top level + if self.additional_properties is not None: + for _key, _value in self.additional_properties.items(): + _dict[_key] = _value + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ProtobufAny from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "@type": obj.get("@type") + }) + # store additional fields in additional_properties + for _key in obj.keys(): + if _key not in cls.__properties: + _obj.additional_properties[_key] = obj.get(_key) + + return _obj + + diff --git a/skyflow/generated/rest/models/query_service_execute_query_body.py b/skyflow/generated/rest/models/query_service_execute_query_body.py new file mode 100644 index 00000000..fa6a9bf9 --- /dev/null +++ b/skyflow/generated/rest/models/query_service_execute_query_body.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class QueryServiceExecuteQueryBody(BaseModel): + """ + QueryServiceExecuteQueryBody + """ # noqa: E501 + query: Optional[StrictStr] = Field(default=None, description="The SQL query to execute.

Supported commands:
  • SELECT
Supported operators:
  • >
  • <
  • =
  • AND
  • OR
  • NOT
  • LIKE
  • ILIKE
  • NULL
  • NOT NULL
Supported keywords:
  • FROM
  • JOIN
  • INNER JOIN
  • LEFT OUTER JOIN
  • LEFT JOIN
  • RIGHT OUTER JOIN
  • RIGHT JOIN
  • FULL OUTER JOIN
  • FULL JOIN
  • OFFSET
  • LIMIT
  • WHERE
Supported functions:
  • AVG()
  • SUM()
  • COUNT()
  • MIN()
  • MAX()
  • REDACTION()
") + __properties: ClassVar[List[str]] = ["query"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of QueryServiceExecuteQueryBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of QueryServiceExecuteQueryBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "query": obj.get("query") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_batch_operation_body.py b/skyflow/generated/rest/models/record_service_batch_operation_body.py new file mode 100644 index 00000000..fe6ef37e --- /dev/null +++ b/skyflow/generated/rest/models/record_service_batch_operation_body.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_batch_record import V1BatchRecord +from skyflow.generated.rest.models.v1_byot import V1BYOT +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceBatchOperationBody(BaseModel): + """ + RecordServiceBatchOperationBody + """ # noqa: E501 + records: Optional[List[V1BatchRecord]] = Field(default=None, description="Record operations to perform.") + continue_on_error: Optional[StrictBool] = Field(default=None, description="Continue performing operations on partial errors.", alias="continueOnError") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["records", "continueOnError", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceBatchOperationBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceBatchOperationBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1BatchRecord.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, + "continueOnError": obj.get("continueOnError"), + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py b/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py new file mode 100644 index 00000000..b12f79a8 --- /dev/null +++ b/skyflow/generated/rest/models/record_service_bulk_delete_record_body.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceBulkDeleteRecordBody(BaseModel): + """ + RecordServiceBulkDeleteRecordBody + """ # noqa: E501 + skyflow_ids: Optional[List[StrictStr]] = Field(default=None, description="`skyflow_id` values of the records to delete. If `*` is specified, this operation deletes all records in the table.") + __properties: ClassVar[List[str]] = ["skyflow_ids"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceBulkDeleteRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceBulkDeleteRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_ids": obj.get("skyflow_ids") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_insert_record_body.py b/skyflow/generated/rest/models/record_service_insert_record_body.py new file mode 100644 index 00000000..c067fe25 --- /dev/null +++ b/skyflow/generated/rest/models/record_service_insert_record_body.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceInsertRecordBody(BaseModel): + """ + RecordServiceInsertRecordBody + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="Record values and tokens.") + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") + upsert: Optional[StrictStr] = Field(default=None, description="Name of a unique column in the table. Uses upsert operations to check if a record exists based on the unique column's value. If a matching record exists, the record updates with the values you provide. If a matching record doesn't exist, the upsert operation inserts a new record.

When you upsert a field, include the entire contents you want the field to store. For JSON fields, include all nested fields and values. If a nested field isn't included, it's removed.") + homogeneous: Optional[StrictBool] = Field(default=False, description="If `true`, this operation mandates that all the records have the same fields. This parameter does not work with upsert.") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["records", "tokenization", "upsert", "homogeneous", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceInsertRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceInsertRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None, + "tokenization": obj.get("tokenization"), + "upsert": obj.get("upsert"), + "homogeneous": obj.get("homogeneous") if obj.get("homogeneous") is not None else False, + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/record_service_update_record_body.py b/skyflow/generated/rest/models/record_service_update_record_body.py new file mode 100644 index 00000000..627a2f6e --- /dev/null +++ b/skyflow/generated/rest/models/record_service_update_record_body.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_byot import V1BYOT +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class RecordServiceUpdateRecordBody(BaseModel): + """ + RecordServiceUpdateRecordBody + """ # noqa: E501 + record: Optional[V1FieldRecords] = None + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled.") + byot: Optional[V1BYOT] = V1BYOT.DISABLE + __properties: ClassVar[List[str]] = ["record", "tokenization", "byot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RecordServiceUpdateRecordBody from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of record + if self.record: + _dict['record'] = self.record.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RecordServiceUpdateRecordBody from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "record": V1FieldRecords.from_dict(obj["record"]) if obj.get("record") is not None else None, + "tokenization": obj.get("tokenization"), + "byot": obj.get("byot") if obj.get("byot") is not None else V1BYOT.DISABLE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/redaction_enum_redaction.py b/skyflow/generated/rest/models/redaction_enum_redaction.py new file mode 100644 index 00000000..82f1a16e --- /dev/null +++ b/skyflow/generated/rest/models/redaction_enum_redaction.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class RedactionEnumREDACTION(str, Enum): + """ + Redaction type. Subject to policies assigned to the API caller. When used for detokenization, only supported for vaults that support [column groups](/tokenization-column-groups/). + """ + + """ + allowed enum values + """ + DEFAULT = 'DEFAULT' + REDACTED = 'REDACTED' + MASKED = 'MASKED' + PLAIN_TEXT = 'PLAIN_TEXT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RedactionEnumREDACTION from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/request_action_type.py b/skyflow/generated/rest/models/request_action_type.py new file mode 100644 index 00000000..2137d2eb --- /dev/null +++ b/skyflow/generated/rest/models/request_action_type.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class RequestActionType(str, Enum): + """ + RequestActionType + """ + + """ + allowed enum values + """ + NONE = 'NONE' + ASSIGN = 'ASSIGN' + CREATE = 'CREATE' + DELETE = 'DELETE' + EXECUTE = 'EXECUTE' + LIST = 'LIST' + READ = 'READ' + UNASSIGN = 'UNASSIGN' + UPDATE = 'UPDATE' + VALIDATE = 'VALIDATE' + LOGIN = 'LOGIN' + ROTATE = 'ROTATE' + SCHEDULEROTATION = 'SCHEDULEROTATION' + SCHEDULEROTATIONALERT = 'SCHEDULEROTATIONALERT' + IMPORT = 'IMPORT' + GETIMPORTPARAMETERS = 'GETIMPORTPARAMETERS' + PING = 'PING' + GETCLOUDPROVIDER = 'GETCLOUDPROVIDER' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RequestActionType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_audit_after_options.py b/skyflow/generated/rest/models/v1_audit_after_options.py new file mode 100644 index 00000000..f8c441ef --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_after_options.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
  • Header: Authorization
  • Value: A Bearer Token. See API Authentication.
  • Example: Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditAfterOptions(BaseModel): + """ + V1AuditAfterOptions + """ # noqa: E501 + timestamp: Optional[StrictStr] = Field(default=None, description="Timestamp provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. For the first request in a series of audit requests, leave blank.") + change_id: Optional[StrictStr] = Field(default=None, description="Change ID provided in the previous audit response's `nextOps` attribute. An alternate way to manage response pagination. Can't be used with `sortOps` or `offset`. 
For the first request in a series of audit requests, leave blank.", alias="changeID") + __properties: ClassVar[List[str]] = ["timestamp", "changeID"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditAfterOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditAfterOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "changeID": obj.get("changeID") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_event_response.py b/skyflow/generated/rest/models/v1_audit_event_response.py new file mode 100644 index 00000000..bb78dfc8 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_event_response.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditEventResponse(BaseModel): + """ + Contains fields for defining Response Properties. + """ # noqa: E501 + code: Optional[StrictInt] = Field(default=None, description="The status of the overall operation.") + message: Optional[StrictStr] = Field(default=None, description="The status message for the overall operation.") + data: Optional[AuditEventData] = None + timestamp: Optional[StrictStr] = Field(default=None, description="time when this response is generated, use extention method to set it.") + __properties: ClassVar[List[str]] = ["code", "message", "data", "timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditEventResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditEventResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "code": obj.get("code"), + "message": obj.get("message"), + "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, + "timestamp": obj.get("timestamp") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response.py b/skyflow/generated/rest/models/v1_audit_response.py new file mode 100644 index 00000000..06a3d0df --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_audit_after_options import V1AuditAfterOptions +from skyflow.generated.rest.models.v1_audit_response_event import V1AuditResponseEvent +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponse(BaseModel): + """ + V1AuditResponse + """ # noqa: E501 + event: Optional[List[V1AuditResponseEvent]] = Field(default=None, description="Events matching the query.") + next_ops: Optional[V1AuditAfterOptions] = Field(default=None, alias="nextOps") + __properties: ClassVar[List[str]] = ["event", "nextOps"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in event (list) + _items = [] + if self.event: + for _item_event in self.event: + if _item_event: + _items.append(_item_event.to_dict()) + _dict['event'] = _items + # override the default output from pydantic by calling `to_dict()` of next_ops + if self.next_ops: + _dict['nextOps'] = self.next_ops.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "event": [V1AuditResponseEvent.from_dict(_item) for _item in obj["event"]] if obj.get("event") is not None else None, + "nextOps": V1AuditAfterOptions.from_dict(obj["nextOps"]) if obj.get("nextOps") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response_event.py b/skyflow/generated/rest/models/v1_audit_response_event.py new file mode 100644 index 00000000..0edd2a52 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response_event.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_context import AuditEventContext +from skyflow.generated.rest.models.v1_audit_event_response import V1AuditEventResponse +from skyflow.generated.rest.models.v1_audit_response_event_request import V1AuditResponseEventRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponseEvent(BaseModel): + """ + Audit event details. + """ # noqa: E501 + context: Optional[AuditEventContext] = None + request: Optional[V1AuditResponseEventRequest] = None + response: Optional[V1AuditEventResponse] = None + parent_account_id: Optional[StrictStr] = Field(default=None, description="Parent account ID of the account that made the request, if any.", alias="parentAccountID") + account_id: Optional[StrictStr] = Field(default=None, description="ID of the account that made the request.", alias="accountID") + resource_ids: Optional[List[StrictStr]] = Field(default=None, description="IDs for resources involved in the event. Presented in `{resourceType}/{resourceID}` format. 
For example, `VAULT/cd1d815aa09b4cbfbb803bd20349f202`.", alias="resourceIDs") + __properties: ClassVar[List[str]] = ["context", "request", "response", "parentAccountID", "accountID", "resourceIDs"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponseEvent from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of context + if self.context: + _dict['context'] = self.context.to_dict() + # override the default output from pydantic by calling `to_dict()` of request + if self.request: + _dict['request'] = self.request.to_dict() + # override the default output from pydantic by calling `to_dict()` of response + if self.response: + _dict['response'] = self.response.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponseEvent from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "context": AuditEventContext.from_dict(obj["context"]) if obj.get("context") is not None else None, + "request": V1AuditResponseEventRequest.from_dict(obj["request"]) if obj.get("request") is not None else None, + "response": V1AuditEventResponse.from_dict(obj["response"]) if obj.get("response") is not None else None, + "parentAccountID": obj.get("parentAccountID"), + "accountID": obj.get("accountID"), + "resourceIDs": obj.get("resourceIDs") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_audit_response_event_request.py b/skyflow/generated/rest/models/v1_audit_response_event_request.py new file mode 100644 index 00000000..2b4c6546 --- /dev/null +++ b/skyflow/generated/rest/models/v1_audit_response_event_request.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.audit_event_audit_resource_type import AuditEventAuditResourceType +from skyflow.generated.rest.models.audit_event_data import AuditEventData +from skyflow.generated.rest.models.audit_event_http_info import AuditEventHTTPInfo +from skyflow.generated.rest.models.request_action_type import RequestActionType +from typing import Optional, Set +from typing_extensions import Self + +class V1AuditResponseEventRequest(BaseModel): + """ + Contains fields for defining Request Properties. + """ # noqa: E501 + data: Optional[AuditEventData] = None + api_name: Optional[StrictStr] = Field(default=None, description="API name.", alias="apiName") + workspace_id: Optional[StrictStr] = Field(default=None, description="The workspaceID (if any) of the request.", alias="workspaceID") + vault_id: Optional[StrictStr] = Field(default=None, description="The vaultID (if any) of the request.", alias="vaultID") + tags: Optional[List[StrictStr]] = Field(default=None, description="Tags associated with the event. To provide better search capabilities. 
Like login.") + timestamp: Optional[StrictStr] = Field(default=None, description="time when this request is generated, use extention method to set it.") + action_type: Optional[RequestActionType] = Field(default=RequestActionType.NONE, alias="actionType") + resource_type: Optional[AuditEventAuditResourceType] = Field(default=AuditEventAuditResourceType.NONE_API, alias="resourceType") + http_info: Optional[AuditEventHTTPInfo] = Field(default=None, alias="httpInfo") + __properties: ClassVar[List[str]] = ["data", "apiName", "workspaceID", "vaultID", "tags", "timestamp", "actionType", "resourceType", "httpInfo"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1AuditResponseEventRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + # override the default output from pydantic by calling `to_dict()` of http_info + if self.http_info: + _dict['httpInfo'] = self.http_info.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1AuditResponseEventRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": AuditEventData.from_dict(obj["data"]) if obj.get("data") is not None else None, + "apiName": obj.get("apiName"), + "workspaceID": obj.get("workspaceID"), + "vaultID": obj.get("vaultID"), + "tags": obj.get("tags"), + "timestamp": obj.get("timestamp"), + "actionType": obj.get("actionType") if obj.get("actionType") is not None else RequestActionType.NONE, + "resourceType": obj.get("resourceType") if obj.get("resourceType") is not None else AuditEventAuditResourceType.NONE_API, + "httpInfo": AuditEventHTTPInfo.from_dict(obj["httpInfo"]) if obj.get("httpInfo") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_batch_operation_response.py b/skyflow/generated/rest/models/v1_batch_operation_response.py new file mode 100644 index 00000000..b790403f --- /dev/null +++ b/skyflow/generated/rest/models/v1_batch_operation_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1BatchOperationResponse(BaseModel): + """ + V1BatchOperationResponse + """ # noqa: E501 + vault_id: Optional[StrictStr] = Field(default=None, description="ID of the vault.", alias="vaultID") + responses: Optional[List[Dict[str, Any]]] = Field(default=None, description="Responses in the same order as in the request. Responses have the same payload structure as their corresponding APIs:
  • `POST` returns an Insert Records response.
  • `PUT` returns an Update Record response.
  • `GET` returns a Get Record response.
  • `DELETE` returns a Delete Record response.
") + __properties: ClassVar[List[str]] = ["vaultID", "responses"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BatchOperationResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BatchOperationResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "vaultID": obj.get("vaultID"), + "responses": obj.get("responses") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_batch_record.py b/skyflow/generated/rest/models/v1_batch_record.py new file mode 100644 index 00000000..76480a55 --- /dev/null +++ b/skyflow/generated/rest/models/v1_batch_record.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.batch_record_method import BatchRecordMethod +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from typing import Optional, Set +from typing_extensions import Self + +class V1BatchRecord(BaseModel): + """ + V1BatchRecord + """ # noqa: E501 + fields: Optional[Dict[str, Any]] = Field(default=None, description="Field and value key pairs. For example, `{'field_1':'value_1', 'field_2':'value_2'}`. Only valid when `method` is `POST` or `PUT`.") + table_name: Optional[StrictStr] = Field(default=None, description="Name of the table to perform the operation on.", alias="tableName") + method: Optional[BatchRecordMethod] = BatchRecordMethod.NONE + batch_id: Optional[StrictStr] = Field(default=None, description="ID to group operations by. Operations in the same group are executed sequentially.", alias="batchID") + redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT + tokenization: Optional[StrictBool] = Field(default=None, description="If `true`, this operation returns tokens for fields with tokenization enabled. Only applicable if `skyflow_id` values are specified.") + id: Optional[StrictStr] = Field(default=None, description="`skyflow_id` for the record. Only valid when `method` is `GET`, `DELETE`, or `PUT`.", alias="ID") + download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. 
If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") + upsert: Optional[StrictStr] = Field(default=None, description="Column that stores primary keys for upsert operations. The column must be marked as unique in the vault schema. Only valid when `method` is `POST`.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") + __properties: ClassVar[List[str]] = ["fields", "tableName", "method", "batchID", "redaction", "tokenization", "ID", "downloadURL", "upsert", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BatchRecord from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BatchRecord from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "tableName": obj.get("tableName"), + "method": obj.get("method") if obj.get("method") is not None else BatchRecordMethod.NONE, + "batchID": obj.get("batchID"), + "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT, + "tokenization": obj.get("tokenization"), + "ID": obj.get("ID"), + "downloadURL": obj.get("downloadURL"), + "upsert": obj.get("upsert"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bin_list_request.py b/skyflow/generated/rest/models/v1_bin_list_request.py new file mode 100644 index 00000000..71de651e --- /dev/null +++ b/skyflow/generated/rest/models/v1_bin_list_request.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_vault_schema_config import V1VaultSchemaConfig +from typing import Optional, Set +from typing_extensions import Self + +class V1BINListRequest(BaseModel): + """ + Request to return specific card metadata. + """ # noqa: E501 + fields: Optional[List[StrictStr]] = Field(default=None, description="Fields to return. If not specified, all fields are returned.") + bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") + vault_schema_config: Optional[V1VaultSchemaConfig] = None + skyflow_id: Optional[StrictStr] = Field(default=None, description="skyflow_id of the record.") + __properties: ClassVar[List[str]] = ["fields", "BIN", "vault_schema_config", "skyflow_id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BINListRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of vault_schema_config + if self.vault_schema_config: + _dict['vault_schema_config'] = self.vault_schema_config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BINListRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "BIN": obj.get("BIN"), + "vault_schema_config": V1VaultSchemaConfig.from_dict(obj["vault_schema_config"]) if obj.get("vault_schema_config") is not None else None, + "skyflow_id": obj.get("skyflow_id") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bin_list_response.py b/skyflow/generated/rest/models/v1_bin_list_response.py new file mode 100644 index 00000000..becf8bb4 --- /dev/null +++ b/skyflow/generated/rest/models/v1_bin_list_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_card import V1Card +from typing import Optional, Set +from typing_extensions import Self + +class V1BINListResponse(BaseModel): + """ + Response to the Get BIN request. + """ # noqa: E501 + cards_data: Optional[List[V1Card]] = Field(default=None, description="Card metadata associated with the specified BIN.") + __properties: ClassVar[List[str]] = ["cards_data"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BINListResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in cards_data (list) + _items = [] + if self.cards_data: + for _item_cards_data in self.cards_data: + if _item_cards_data: + _items.append(_item_cards_data.to_dict()) + _dict['cards_data'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BINListResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cards_data": [V1Card.from_dict(_item) for _item in obj["cards_data"]] if obj.get("cards_data") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bulk_delete_record_response.py b/skyflow/generated/rest/models/v1_bulk_delete_record_response.py new file mode 100644 index 00000000..726e1c40 --- /dev/null +++ b/skyflow/generated/rest/models/v1_bulk_delete_record_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1BulkDeleteRecordResponse(BaseModel): + """ + V1BulkDeleteRecordResponse + """ # noqa: E501 + record_id_response: Optional[List[StrictStr]] = Field(default=None, description="IDs for the deleted records, or `*` if all records were deleted.", alias="RecordIDResponse") + __properties: ClassVar[List[str]] = ["RecordIDResponse"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BulkDeleteRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BulkDeleteRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "RecordIDResponse": obj.get("RecordIDResponse") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_bulk_get_record_response.py b/skyflow/generated/rest/models/v1_bulk_get_record_response.py new file mode 100644 index 00000000..df8095df --- /dev/null +++ b/skyflow/generated/rest/models/v1_bulk_get_record_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class V1BulkGetRecordResponse(BaseModel): + """ + V1BulkGetRecordResponse + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="The specified records.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1BulkGetRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1BulkGetRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_byot.py b/skyflow/generated/rest/models/v1_byot.py new file mode 100644 index 00000000..754a70dc --- /dev/null +++ b/skyflow/generated/rest/models/v1_byot.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1BYOT(str, Enum): + """ + Token insertion behavior. - DISABLE: Tokens aren't allowed for any fields. If tokens are specified, the request fails. - ENABLE: Tokens are allowed—but not required—for all fields. If tokens are specified, they're inserted. - ENABLE_STRICT: Tokens are required for all fields. If tokens are specified, they're inserted. If not, the request fails. + """ + + """ + allowed enum values + """ + DISABLE = 'DISABLE' + ENABLE = 'ENABLE' + ENABLE_STRICT = 'ENABLE_STRICT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1BYOT from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_card.py b/skyflow/generated/rest/models/v1_card.py new file mode 100644 index 00000000..2245ee74 --- /dev/null +++ b/skyflow/generated/rest/models/v1_card.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1Card(BaseModel): + """ + Card metadata of the requested BIN. + """ # noqa: E501 + bin: Optional[StrictStr] = Field(default=None, description="BIN of the card.", alias="BIN") + issuer_name: Optional[StrictStr] = Field(default=None, description="Name of the card issuer bank.") + country_code: Optional[StrictStr] = Field(default=None, description="Country code of the card.") + currency: Optional[StrictStr] = Field(default=None, description="Currency of the card.") + card_type: Optional[StrictStr] = Field(default=None, description="Type of the card.") + card_category: Optional[StrictStr] = Field(default=None, description="Category of the card.") + card_scheme: Optional[StrictStr] = Field(default=None, description="Scheme of the card.") + card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Last four digits of the card number.") + card_expiry: Optional[StrictStr] = Field(default=None, description="Expiry date of the card.") + __properties: ClassVar[List[str]] = ["BIN", "issuer_name", "country_code", "currency", "card_type", "card_category", "card_scheme", "card_last_four_digits", "card_expiry"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: 
pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1Card from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1Card from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "BIN": obj.get("BIN"), + "issuer_name": obj.get("issuer_name"), + "country_code": obj.get("country_code"), + "currency": obj.get("currency"), + "card_type": obj.get("card_type"), + "card_category": obj.get("card_category"), + "card_scheme": obj.get("card_scheme"), + "card_last_four_digits": obj.get("card_last_four_digits"), + "card_expiry": obj.get("card_expiry") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_delete_file_response.py b/skyflow/generated/rest/models/v1_delete_file_response.py new file mode 100644 index 00000000..e68030c0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_delete_file_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1DeleteFileResponse(BaseModel): + """ + V1DeleteFileResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the record.") + deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the file was deleted.") + __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DeleteFileResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DeleteFileResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "deleted": obj.get("deleted") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_delete_record_response.py b/skyflow/generated/rest/models/v1_delete_record_response.py new file mode 100644 index 00000000..a56d3ba2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_delete_record_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1DeleteRecordResponse(BaseModel): + """ + V1DeleteRecordResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the deleted record.") + deleted: Optional[StrictBool] = Field(default=None, description="If `true`, the record was deleted.") + __properties: ClassVar[List[str]] = ["skyflow_id", "deleted"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DeleteRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DeleteRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "deleted": obj.get("deleted") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_payload.py b/skyflow/generated/rest/models/v1_detokenize_payload.py new file mode 100644 index 00000000..0394aa1c --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_payload.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_detokenize_record_request import V1DetokenizeRecordRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizePayload(BaseModel): + """ + V1DetokenizePayload + """ # noqa: E501 + detokenization_parameters: Optional[List[V1DetokenizeRecordRequest]] = Field(default=None, description="Detokenization details.", alias="detokenizationParameters") + download_url: Optional[StrictBool] = Field(default=None, description="If `true`, returns download URLs for fields with a file data type. URLs are valid for 15 minutes. If virus scanning is enabled, only returns if the file is clean.", alias="downloadURL") + continue_on_error: Optional[StrictBool] = Field(default=False, description="If `true`, the detokenization request continues even if an error occurs.", alias="continueOnError") + __properties: ClassVar[List[str]] = ["detokenizationParameters", "downloadURL", "continueOnError"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizePayload from a JSON string""" + 
return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in detokenization_parameters (list) + _items = [] + if self.detokenization_parameters: + for _item_detokenization_parameters in self.detokenization_parameters: + if _item_detokenization_parameters: + _items.append(_item_detokenization_parameters.to_dict()) + _dict['detokenizationParameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "detokenizationParameters": [V1DetokenizeRecordRequest.from_dict(_item) for _item in obj["detokenizationParameters"]] if obj.get("detokenizationParameters") is not None else None, + "downloadURL": obj.get("downloadURL"), + "continueOnError": obj.get("continueOnError") if obj.get("continueOnError") is not None else False + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_record_request.py b/skyflow/generated/rest/models/v1_detokenize_record_request.py new file mode 100644 index 00000000..2899501b --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_record_request.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. 
The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.redaction_enum_redaction import RedactionEnumREDACTION +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeRecordRequest(BaseModel): + """ + V1DetokenizeRecordRequest + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token that identifies the record to detokenize.") + redaction: Optional[RedactionEnumREDACTION] = RedactionEnumREDACTION.DEFAULT + __properties: ClassVar[List[str]] = ["token", "redaction"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token"), + "redaction": obj.get("redaction") if obj.get("redaction") is not None else RedactionEnumREDACTION.DEFAULT + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_record_response.py b/skyflow/generated/rest/models/v1_detokenize_record_response.py new file mode 100644 index 00000000..2da5d15d --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_record_response.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.detokenize_record_response_value_type import DetokenizeRecordResponseValueType +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeRecordResponse(BaseModel): + """ + V1DetokenizeRecordResponse + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token of the record.") + value_type: Optional[DetokenizeRecordResponseValueType] = Field(default=DetokenizeRecordResponseValueType.NONE, alias="valueType") + value: Optional[StrictStr] = Field(default=None, description="Data corresponding to the token.") + error: Optional[StrictStr] = Field(default=None, description="Error if token isn't found.") + __properties: ClassVar[List[str]] = ["token", "valueType", "value", "error"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token"), + "valueType": obj.get("valueType") if obj.get("valueType") is not None else DetokenizeRecordResponseValueType.NONE, + "value": obj.get("value"), + "error": obj.get("error") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_detokenize_response.py b/skyflow/generated/rest/models/v1_detokenize_response.py new file mode 100644 index 00000000..34554aa0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_detokenize_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
Header | Value | Example
Authorization | A Bearer Token. See API Authentication. | Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_detokenize_record_response import V1DetokenizeRecordResponse +from typing import Optional, Set +from typing_extensions import Self + +class V1DetokenizeResponse(BaseModel): + """ + V1DetokenizeResponse + """ # noqa: E501 + records: Optional[List[V1DetokenizeRecordResponse]] = Field(default=None, description="Records corresponding to the specified tokens.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1DetokenizeResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1DetokenizeResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1DetokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_field_records.py b/skyflow/generated/rest/models/v1_field_records.py new file mode 100644 index 00000000..913fd6d0 --- /dev/null +++ b/skyflow/generated/rest/models/v1_field_records.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1FieldRecords(BaseModel): + """ + Record values and tokens. + """ # noqa: E501 + fields: Optional[Dict[str, Any]] = Field(default=None, description="Fields and values for the record. For example, `{'field_1':'value_1', 'field_2':'value_2'}`.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Fields and tokens for the record. For example, `{'field_1':'token_1', 'field_2':'token_2'}`.") + __properties: ClassVar[List[str]] = ["fields", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1FieldRecords from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1FieldRecords from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fields": obj.get("fields"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_file_av_scan_status.py b/skyflow/generated/rest/models/v1_file_av_scan_status.py new file mode 100644 index 00000000..91479e32 --- /dev/null +++ b/skyflow/generated/rest/models/v1_file_av_scan_status.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1FileAVScanStatus(str, Enum): + """ + Anti-virus scan status of the file. + """ + + """ + allowed enum values + """ + SCAN_NONE = 'SCAN_NONE' + SCAN_CLEAN = 'SCAN_CLEAN' + SCAN_INFECTED = 'SCAN_INFECTED' + SCAN_DELETED = 'SCAN_DELETED' + SCAN_ERROR = 'SCAN_ERROR' + SCAN_PENDING = 'SCAN_PENDING' + SCAN_UNSCANNABLE = 'SCAN_UNSCANNABLE' + SCAN_FILE_NOT_FOUND = 'SCAN_FILE_NOT_FOUND' + SCAN_INVALID = 'SCAN_INVALID' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1FileAVScanStatus from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_get_auth_token_request.py b/skyflow/generated/rest/models/v1_get_auth_token_request.py new file mode 100644 index 00000000..fd5b201f --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_auth_token_request.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
X-SKYFLOW-ACCOUNT-IDYour Skyflow account ID.X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1GetAuthTokenRequest(BaseModel): + """ + V1GetAuthTokenRequest + """ # noqa: E501 + grant_type: StrictStr = Field(description="Grant type of the request. Set this to `urn:ietf:params:oauth:grant-type:jwt-bearer`.") + assertion: StrictStr = Field(description="User-signed JWT token that contains the following fields:
  • iss: Issuer of the JWT.
  • key: Unique identifier for the key.
  • aud: Recipient the JWT is intended for.
  • exp: Time the JWT expires.
  • sub: Subject of the JWT.
  • ctx: (Optional) Value for Context-aware authorization.
") + subject_token: Optional[StrictStr] = Field(default=None, description="Subject token.") + subject_token_type: Optional[StrictStr] = Field(default=None, description="Subject token type.") + requested_token_use: Optional[StrictStr] = Field(default=None, description="Token use type. Either `delegation` or `impersonation`.") + scope: Optional[StrictStr] = Field(default=None, description="Subset of available roles to associate with the requested token. Uses the format \"role:\\ role:\\\".") + __properties: ClassVar[List[str]] = ["grant_type", "assertion", "subject_token", "subject_token_type", "requested_token_use", "scope"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetAuthTokenRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetAuthTokenRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "grant_type": obj.get("grant_type"), + "assertion": obj.get("assertion"), + "subject_token": obj.get("subject_token"), + "subject_token_type": obj.get("subject_token_type"), + "requested_token_use": obj.get("requested_token_use"), + "scope": obj.get("scope") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_auth_token_response.py b/skyflow/generated/rest/models/v1_get_auth_token_response.py new file mode 100644 index 00000000..c3fccac2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_auth_token_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Management API + + # Management API This API controls aspects of your account and schema, including workspaces, vaults, keys, users, permissions, and more. The Management API is available from two base URIs:
  • Sandbox: https://manage.skyflowapis-preview.com
  • Production: https://manage.skyflowapis.com
When you make an API call, you need to add two headers:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
X-SKYFLOW-ACCOUNT-IDYour Skyflow account ID.X-SKYFLOW-ACCOUNT-ID: h451b763713e4424a7jke1bbkbbc84ef
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1GetAuthTokenResponse(BaseModel): + """ + V1GetAuthTokenResponse + """ # noqa: E501 + access_token: Optional[StrictStr] = Field(default=None, description="AccessToken.", alias="accessToken") + token_type: Optional[StrictStr] = Field(default=None, description="TokenType : Bearer.", alias="tokenType") + __properties: ClassVar[List[str]] = ["accessToken", "tokenType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetAuthTokenResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetAuthTokenResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "accessToken": obj.get("accessToken"), + "tokenType": obj.get("tokenType") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_file_scan_status_response.py b/skyflow/generated/rest/models/v1_get_file_scan_status_response.py new file mode 100644 index 00000000..78d83d19 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_file_scan_status_response.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_file_av_scan_status import V1FileAVScanStatus +from typing import Optional, Set +from typing_extensions import Self + +class V1GetFileScanStatusResponse(BaseModel): + """ + V1GetFileScanStatusResponse + """ # noqa: E501 + av_scan_status: Optional[V1FileAVScanStatus] = V1FileAVScanStatus.SCAN_NONE + __properties: ClassVar[List[str]] = ["av_scan_status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetFileScanStatusResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetFileScanStatusResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "av_scan_status": obj.get("av_scan_status") if obj.get("av_scan_status") is not None else V1FileAVScanStatus.NONE + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_get_query_response.py b/skyflow/generated/rest/models/v1_get_query_response.py new file mode 100644 index 00000000..3f7dd870 --- /dev/null +++ b/skyflow/generated/rest/models/v1_get_query_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_field_records import V1FieldRecords +from typing import Optional, Set +from typing_extensions import Self + +class V1GetQueryResponse(BaseModel): + """ + V1GetQueryResponse + """ # noqa: E501 + records: Optional[List[V1FieldRecords]] = Field(default=None, description="Records returned by the query.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1GetQueryResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1GetQueryResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1FieldRecords.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_insert_record_response.py b/skyflow/generated/rest/models/v1_insert_record_response.py new file mode 100644 index 00000000..142f1304 --- /dev/null +++ b/skyflow/generated/rest/models/v1_insert_record_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_record_meta_properties import V1RecordMetaProperties +from typing import Optional, Set +from typing_extensions import Self + +class V1InsertRecordResponse(BaseModel): + """ + V1InsertRecordResponse + """ # noqa: E501 + records: Optional[List[V1RecordMetaProperties]] = Field(default=None, description="Identifiers for the inserted records.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1InsertRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1InsertRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1RecordMetaProperties.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_member_type.py b/skyflow/generated/rest/models/v1_member_type.py new file mode 100644 index 00000000..60009732 --- /dev/null +++ b/skyflow/generated/rest/models/v1_member_type.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class V1MemberType(str, Enum): + """ + Type of the member. + """ + + """ + allowed enum values + """ + NONE = 'NONE' + USER = 'USER' + SERVICE_ACCOUNT = 'SERVICE_ACCOUNT' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1MemberType from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/skyflow/generated/rest/models/v1_record_meta_properties.py b/skyflow/generated/rest/models/v1_record_meta_properties.py new file mode 100644 index 00000000..add596f2 --- /dev/null +++ b/skyflow/generated/rest/models/v1_record_meta_properties.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1RecordMetaProperties(BaseModel): + """ + V1RecordMetaProperties + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the inserted record.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") + __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1RecordMetaProperties from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1RecordMetaProperties from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_payload.py b/skyflow/generated/rest/models/v1_tokenize_payload.py new file mode 100644 index 00000000..8a275f2b --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_payload.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_tokenize_record_request import V1TokenizeRecordRequest +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizePayload(BaseModel): + """ + V1TokenizePayload + """ # noqa: E501 + tokenization_parameters: Optional[List[V1TokenizeRecordRequest]] = Field(default=None, description="Tokenization details.", alias="tokenizationParameters") + __properties: ClassVar[List[str]] = ["tokenizationParameters"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizePayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tokenization_parameters (list) + _items = [] + if self.tokenization_parameters: + for _item_tokenization_parameters in self.tokenization_parameters: + if _item_tokenization_parameters: + _items.append(_item_tokenization_parameters.to_dict()) + _dict['tokenizationParameters'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tokenizationParameters": [V1TokenizeRecordRequest.from_dict(_item) for _item in obj["tokenizationParameters"]] if obj.get("tokenizationParameters") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_record_request.py b/skyflow/generated/rest/models/v1_tokenize_record_request.py new file mode 100644 index 00000000..e69e1e93 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_record_request.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeRecordRequest(BaseModel): + """ + V1TokenizeRecordRequest + """ # noqa: E501 + value: Optional[StrictStr] = Field(default=None, description="Existing value to return a token for.") + column_group: Optional[StrictStr] = Field(default=None, description="Name of the column group that the value belongs to.", alias="columnGroup") + __properties: ClassVar[List[str]] = ["value", "columnGroup"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeRecordRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeRecordRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value"), + "columnGroup": obj.get("columnGroup") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_record_response.py b/skyflow/generated/rest/models/v1_tokenize_record_response.py new file mode 100644 index 00000000..24ac8311 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_record_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeRecordResponse(BaseModel): + """ + V1TokenizeRecordResponse + """ # noqa: E501 + token: Optional[StrictStr] = Field(default=None, description="Token corresponding to a value.") + __properties: ClassVar[List[str]] = ["token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_tokenize_response.py b/skyflow/generated/rest/models/v1_tokenize_response.py new file mode 100644 index 00000000..4847bae5 --- /dev/null +++ b/skyflow/generated/rest/models/v1_tokenize_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_tokenize_record_response import V1TokenizeRecordResponse +from typing import Optional, Set +from typing_extensions import Self + +class V1TokenizeResponse(BaseModel): + """ + V1TokenizeResponse + """ # noqa: E501 + records: Optional[List[V1TokenizeRecordResponse]] = Field(default=None, description="Tokens corresponding to the specified values.") + __properties: ClassVar[List[str]] = ["records"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1TokenizeResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in records (list) + _items = [] + if self.records: + for _item_records in self.records: + if _item_records: + _items.append(_item_records.to_dict()) + _dict['records'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1TokenizeResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "records": [V1TokenizeRecordResponse.from_dict(_item) for _item in obj["records"]] if obj.get("records") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_update_record_response.py b/skyflow/generated/rest/models/v1_update_record_response.py new file mode 100644 index 00000000..0d66a403 --- /dev/null +++ b/skyflow/generated/rest/models/v1_update_record_response.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1UpdateRecordResponse(BaseModel): + """ + V1UpdateRecordResponse + """ # noqa: E501 + skyflow_id: Optional[StrictStr] = Field(default=None, description="ID of the updated record.") + tokens: Optional[Dict[str, Any]] = Field(default=None, description="Tokens for the record.") + __properties: ClassVar[List[str]] = ["skyflow_id", "tokens"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1UpdateRecordResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1UpdateRecordResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "skyflow_id": obj.get("skyflow_id"), + "tokens": obj.get("tokens") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_vault_field_mapping.py b/skyflow/generated/rest/models/v1_vault_field_mapping.py new file mode 100644 index 00000000..b00c92e5 --- /dev/null +++ b/skyflow/generated/rest/models/v1_vault_field_mapping.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class V1VaultFieldMapping(BaseModel): + """ + Mapping of the fields in the vault to the fields to use for the lookup. + """ # noqa: E501 + card_number: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number.") + card_last_four_digits: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the card number suffix.") + card_expiry: Optional[StrictStr] = Field(default=None, description="Name of the column that stores the expiry date.") + __properties: ClassVar[List[str]] = ["card_number", "card_last_four_digits", "card_expiry"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1VaultFieldMapping from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1VaultFieldMapping from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "card_number": obj.get("card_number"), + "card_last_four_digits": obj.get("card_last_four_digits"), + "card_expiry": obj.get("card_expiry") + }) + return _obj + + diff --git a/skyflow/generated/rest/models/v1_vault_schema_config.py b/skyflow/generated/rest/models/v1_vault_schema_config.py new file mode 100644 index 00000000..e57e21ff --- /dev/null +++ b/skyflow/generated/rest/models/v1_vault_schema_config.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from skyflow.generated.rest.models.v1_vault_field_mapping import V1VaultFieldMapping +from typing import Optional, Set +from typing_extensions import Self + +class V1VaultSchemaConfig(BaseModel): + """ + Details of the vault that stores additional card details. + """ # noqa: E501 + id: Optional[StrictStr] = Field(default=None, description="ID of the vault that stores card details.") + table_name: Optional[StrictStr] = Field(default=None, description="Name of the table that stores card details.") + mapping: Optional[V1VaultFieldMapping] = None + __properties: ClassVar[List[str]] = ["id", "table_name", "mapping"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1VaultSchemaConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of mapping + if self.mapping: + _dict['mapping'] = self.mapping.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1VaultSchemaConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "table_name": obj.get("table_name"), + "mapping": V1VaultFieldMapping.from_dict(obj["mapping"]) if obj.get("mapping") is not None else None + }) + return _obj + + diff --git a/skyflow/generated/rest/py.typed b/skyflow/generated/rest/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/generated/rest/rest.py b/skyflow/generated/rest/rest.py new file mode 100644 index 00000000..1aaefdb3 --- /dev/null +++ b/skyflow/generated/rest/rest.py @@ -0,0 +1,258 @@ +# coding: utf-8 + +""" + Skyflow Data API + + # Data API This API inserts, retrieves, and otherwise manages data in a vault. The Data API is available from two base URIs. *identifier* is the identifier in your vault's URL.
  • Sandbox: https://*identifier*.vault.skyflowapis-preview.com
  • Production: https://*identifier*.vault.skyflowapis.com
When you make an API call, you need to add a header:
HeaderValueExample
AuthorizationA Bearer Token. See API Authentication.Authorization: Bearer eyJhbGciOiJSUzI...1NiIsJdfPA
+ + The version of the OpenAPI document: v1 + Contact: support@skyflow.com + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import io +import json +import re +import ssl + +import urllib3 + +from skyflow.generated.rest.exceptions import ApiException, ApiValueError + +SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} +RESTResponseType = urllib3.HTTPResponse + + +def is_socks_proxy_url(url): + if url is None: + return False + split_section = url.split("://") + if len(split_section) < 2: + return False + else: + return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES + + +class RESTResponse(io.IOBase): + + def __init__(self, resp) -> None: + self.response = resp + self.status = resp.status + self.reason = resp.reason + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.data + return self.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.response.headers + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.response.headers.get(name, default) + + +class RESTClientObject: + + def __init__(self, configuration) -> None: + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + pool_args = { + "cert_reqs": cert_reqs, + "ca_certs": configuration.ssl_ca_cert, + "cert_file": configuration.cert_file, + "key_file": configuration.key_file, + } + if 
configuration.assert_hostname is not None: + pool_args['assert_hostname'] = ( + configuration.assert_hostname + ) + + if configuration.retries is not None: + pool_args['retries'] = configuration.retries + + if configuration.tls_server_name: + pool_args['server_hostname'] = configuration.tls_server_name + + + if configuration.socket_options is not None: + pool_args['socket_options'] = configuration.socket_options + + if configuration.connection_pool_maxsize is not None: + pool_args['maxsize'] = configuration.connection_pool_maxsize + + # https pool manager + self.pool_manager: urllib3.PoolManager + + if configuration.proxy: + if is_socks_proxy_url(configuration.proxy): + from urllib3.contrib.socks import SOCKSProxyManager + pool_args["proxy_url"] = configuration.proxy + pool_args["headers"] = configuration.proxy_headers + self.pool_manager = SOCKSProxyManager(**pool_args) + else: + pool_args["proxy_url"] = configuration.proxy + pool_args["proxy_headers"] = configuration.proxy_headers + self.pool_manager = urllib3.ProxyManager(**pool_args) + else: + self.pool_manager = urllib3.PoolManager(**pool_args) + + def request( + self, + method, + url, + headers=None, + body=None, + post_params=None, + _request_timeout=None + ): + """Perform requests. + + :param method: http request method + :param url: http request url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in [ + 'GET', + 'HEAD', + 'DELETE', + 'POST', + 'PUT', + 'PATCH', + 'OPTIONS' + ] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." 
+ ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, float)): + timeout = urllib3.Timeout(total=_request_timeout) + elif ( + isinstance(_request_timeout, tuple) + and len(_request_timeout) == 2 + ): + timeout = urllib3.Timeout( + connect=_request_timeout[0], + read=_request_timeout[1] + ) + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + + # no content type provided or payload is json + content_type = headers.get('Content-Type') + if ( + not content_type + or re.search('json', content_type, re.IGNORECASE) + ): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, + url, + body=request_body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'application/x-www-form-urlencoded': + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=False, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + # Ensures that dict objects are serialized + post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params] + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=True, + timeout=timeout, + headers=headers, + preload_content=False + ) + # Pass a `string` parameter directly in the body to support + # other content types than JSON when `body` argument is + # provided in serialized form. 
+ elif isinstance(body, str) or isinstance(body, bytes): + r = self.pool_manager.request( + method, + url, + body=body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool): + request_body = "true" if body else "false" + r = self.pool_manager.request( + method, + url, + body=request_body, + preload_content=False, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request( + method, + url, + fields={}, + timeout=timeout, + headers=headers, + preload_content=False + ) + except urllib3.exceptions.SSLError as e: + msg = "\n".join([type(e).__name__, str(e)]) + raise ApiException(status=0, reason=msg) + + return RESTResponse(r) diff --git a/skyflow/service_account/__init__.py b/skyflow/service_account/__init__.py index dcd8bca9..20d09a8e 100644 --- a/skyflow/service_account/__init__.py +++ b/skyflow/service_account/__init__.py @@ -1,8 +1 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._token import generate_bearer_token -from ._token import generate_bearer_token -from ._token import ResponseToken -from ._token import generate_bearer_token_from_creds -from ._validity import is_expired +from ._utils import generate_bearer_token, generate_bearer_token_from_creds, is_expired, generate_signed_data_tokens, generate_signed_data_tokens_from_creds \ No newline at end of file diff --git a/skyflow/service_account/_token.py b/skyflow/service_account/_token.py deleted file mode 100644 index f73191b1..00000000 --- a/skyflow/service_account/_token.py +++ /dev/null @@ -1,179 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json -import jwt -import datetime -import requests -from warnings import warn -from collections import namedtuple -from skyflow._utils import log_info, InterfaceName, InfoMessages, getMetrics - - -from skyflow.errors._skyflow_errors import * - -ResponseToken = namedtuple('ResponseToken', ['AccessToken', 'TokenType']) -interface = InterfaceName.GENERATE_BEARER_TOKEN - - -def generate_bearer_token(credentialsFilePath: str) -> ResponseToken: - ''' - This function is used to get the access token for skyflow Service Accounts. - `credentialsFilePath` is the file path in string of the credentials file that is downloaded after Service Account creation. - - Response Token is a named tupe with two attributes: - 1. AccessToken: The access token - 2. TokenType: The type of access token (eg: Bearer) - ''' - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_TRIGGERED.value, - interface=interface) - - try: - credentialsFile = open(credentialsFilePath, 'r') - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FILE_NOT_FOUND.value % (credentialsFilePath), interface=interface) - - try: - credentials = json.load(credentialsFile) - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FILE_INVALID_JSON.value % (credentialsFilePath), interface=interface) - finally: - credentialsFile.close() - - result = getSAToken(credentials) - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_SUCCESS.value, - interface=interface) - return result - - -def generate_bearer_token_from_creds(credentials: str) -> ResponseToken: - ''' - This function is used to get the access token for skyflow Service Accounts. - `credentials` arg takes the content of the credentials file that is downloaded after Service Account creation. - - Response Token is a named tupe with two attributes: - 1. AccessToken: The access token - 2. 
TokenType: The type of access token (eg: Bearer) - ''' - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_TRIGGERED.value, - interface=interface) - try: - jsonCredentials = json.loads(credentials) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_CREDENTIALS, interface=interface) - result = getSAToken(jsonCredentials) - - log_info(InfoMessages.GENERATE_BEARER_TOKEN_SUCCESS.value, - interface=interface) - return result - - -def getSAToken(credentials): - try: - privateKey = credentials["privateKey"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_PRIVATE_KEY, interface=interface) - try: - clientID = credentials["clientID"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_CLIENT_ID, interface=interface) - try: - keyID = credentials["keyID"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_KEY_ID, interface=interface) - try: - tokenURI = credentials["tokenURI"] - except: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.MISSING_TOKEN_URI, interface=interface) - - signedToken = getSignedJWT(clientID, keyID, tokenURI, privateKey) - - response = sendRequestWithToken(tokenURI, signedToken) - content = response.content.decode('utf-8') - - try: - token = json.loads(content) - except json.decoder.JSONDecodeError as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RESPONSE_NOT_JSON % content, interface=interface) - return getResponseToken(token) - - -def getSignedJWT(clientID, keyID, tokenURI, privateKey): - payload = { - "iss": clientID, - "key": keyID, - "aud": tokenURI, - "sub": clientID, - "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60) - } - try: - return jwt.encode(payload=payload, key=privateKey, algorithm="RS256") - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - 
SkyflowErrorMessages.JWT_INVALID_FORMAT, interface=interface) - - -def sendRequestWithToken(url, token): - headers = { - "content-type": "application/json", - "sky-metadata": json.dumps(getMetrics()) - } - payload = { - "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", - "assertion": token - } - try: - response = requests.post(url=url, json=payload, headers=headers) - statusCode = response.status_code - except requests.exceptions.InvalidURL: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_URL.value % (url), interface=interface) - except requests.exceptions.MissingSchema: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_URL.value % (url), interface=interface) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if error.response != None and error.response.content != None: - try: - errorResponse = json.loads( - error.response.content.decode('utf-8')) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % error.response.content.decode( - 'utf-8') - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - raise SkyflowError(statusCode, message, interface=interface) - - return response - - -def getResponseToken(token): - try: - accessToken = token["accessToken"] - except: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, - SkyflowErrorMessages.MISSING_ACCESS_TOKEN, interface=interface) - - try: - tokenType = token["tokenType"] - except: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, - SkyflowErrorMessages.MISSING_TOKEN_TYPE, interface=interface) - - return ResponseToken(AccessToken=accessToken, TokenType=tokenType) diff --git 
a/skyflow/service_account/_utils.py b/skyflow/service_account/_utils.py new file mode 100644 index 00000000..12ae41e6 --- /dev/null +++ b/skyflow/service_account/_utils.py @@ -0,0 +1,180 @@ +import json +import datetime +import time +import jwt +from skyflow.error import SkyflowError +from skyflow.generated.rest.models import V1GetAuthTokenRequest +from skyflow.service_account.client.auth_client import AuthClient +from skyflow.utils.logger import log_error, log_info, log_error_log, Logger +from skyflow.utils import get_base_url, format_scope, SkyflowMessages + + +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def is_expired(token, logger = None): + if len(token) == 0: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_BEARER_TOKEN.value) + return True + + try: + decoded = jwt.decode( + token, options={"verify_signature": False, "verify_aud": False}) + if time.time() >= decoded['exp']: + log_info(SkyflowMessages.Info.BEARER_TOKEN_EXPIRED.value, logger) + log_error_log(SkyflowMessages.ErrorLogs.INVALID_BEARER_TOKEN.value) + return True + return False + except jwt.ExpiredSignatureError: + return True + except Exception: + log_error(SkyflowMessages.Error.JWT_DECODE_ERROR.value, invalid_input_error_code, logger = logger) + return True + +def generate_bearer_token(credentials_file_path, options = None, logger = None): + try: + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_TRIGGERED.value, logger) + credentials_file =open(credentials_file_path, 'r') + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value, invalid_input_error_code) + + try: + credentials = json.load(credentials_file) + except Exception: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_CREDENTIALS_FILE.value, logger = logger) + raise SkyflowError(SkyflowMessages.Error.FILE_INVALID_JSON.value.format(credentials_file_path), invalid_input_error_code) + + finally: + credentials_file.close() + result = get_service_account_token(credentials, 
options, logger) + return result + +def generate_bearer_token_from_creds(credentials, options = None, logger = None): + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_TRIGGERED.value, logger) + credentials = credentials.strip() + try: + json_credentials = json.loads(credentials.replace('\n', '\\n')) + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value, invalid_input_error_code) + result = get_service_account_token(json_credentials, options, logger) + return result + +def get_service_account_token(credentials, options, logger): + try: + private_key = credentials["privateKey"] + except: + log_error_log(SkyflowMessages.ErrorLogs.PRIVATE_KEY_IS_REQUIRED.value, logger = logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_PRIVATE_KEY.value, invalid_input_error_code) + try: + client_id = credentials["clientID"] + except: + log_error_log(SkyflowMessages.ErrorLogs.CLIENT_ID_IS_REQUIRED.value, logger=logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_CLIENT_ID.value, invalid_input_error_code) + try: + key_id = credentials["keyID"] + except: + log_error_log(SkyflowMessages.ErrorLogs.KEY_ID_IS_REQUIRED.value, logger=logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_KEY_ID.value, invalid_input_error_code) + try: + token_uri = credentials["tokenURI"] + except: + log_error_log(SkyflowMessages.ErrorLogs.TOKEN_URI_IS_REQUIRED.value, logger=logger) + raise SkyflowError(SkyflowMessages.Error.MISSING_TOKEN_URI.value, invalid_input_error_code) + + signed_token = get_signed_jwt(options, client_id, key_id, token_uri, private_key, logger) + base_url = get_base_url(token_uri) + auth_client = AuthClient(base_url) + auth_api = auth_client.get_auth_api() + + formatted_scope = None + if options and "role_ids" in options: + formatted_scope = format_scope(options.get("role_ids")) + + request = V1GetAuthTokenRequest(assertion = signed_token, + grant_type="urn:ietf:params:oauth:grant-type:jwt-bearer", + scope=formatted_scope) + 
response = auth_api.authentication_service_get_auth_token(request) + log_info(SkyflowMessages.Info.GET_BEARER_TOKEN_SUCCESS.value, logger) + return response.access_token, response.token_type + +def get_signed_jwt(options, client_id, key_id, token_uri, private_key, logger): + payload = { + "iss": client_id, + "key": key_id, + "aud": token_uri, + "sub": client_id, + "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60) + } + if options and "ctx" in options: + payload["ctx"] = options.get("ctx") + try: + return jwt.encode(payload=payload, key=private_key, algorithm="RS256") + except Exception: + raise SkyflowError(SkyflowMessages.Error.JWT_INVALID_FORMAT.value, invalid_input_error_code) + + + +def get_signed_tokens(credentials_obj, options): + try: + expiry_time = time.time() + options.get("time_to_live", 60) + prefix = "signed_token_" + response_array=[] + + if options and options.get("data_tokens"): + for token in options["data_tokens"]: + claims = { + "iss": "sdk", + "key": credentials_obj.get("keyID"), + "aud": credentials_obj.get("tokenURI"), + "exp": expiry_time, + "sub": credentials_obj.get("clientID"), + "tok": token + } + + if "ctx" in options: + claims["ctx"] = options["ctx"] + + private_key = credentials_obj.get("privateKey") + signed_jwt = jwt.encode(claims, private_key, algorithm="RS256") + response_object = get_signed_data_token_response_object(prefix + signed_jwt, token) + response_array.append(response_object) + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKEN_SUCCESS.value) + return response_array + + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS.value, invalid_input_error_code) + + +def generate_signed_data_tokens(credentials_file_path, options): + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKENS_TRIGGERED.value) + try: + credentials_file =open(credentials_file_path, 'r') + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value, invalid_input_error_code) + 
+ try: + credentials = json.load(credentials_file) + except Exception: + raise SkyflowError(SkyflowMessages.Error.FILE_INVALID_JSON.value.format(credentials_file_path), + invalid_input_error_code) + + finally: + credentials_file.close() + + return get_signed_tokens(credentials, options) + +def generate_signed_data_tokens_from_creds(credentials, options): + log_info(SkyflowMessages.Info.GET_SIGNED_DATA_TOKENS_TRIGGERED.value) + credentials = credentials.strip() + try: + json_credentials = json.loads(credentials.replace('\n', '\\n')) + except Exception: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_CREDENTIALS_FILE.value) + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value, invalid_input_error_code) + return get_signed_tokens(json_credentials, options) + +def get_signed_data_token_response_object(signed_token, actual_token): + response_object = { + "token": actual_token, + "signed_token": signed_token + } + return response_object.get("token"), response_object.get("signed_token") diff --git a/skyflow/service_account/_validity.py b/skyflow/service_account/_validity.py deleted file mode 100644 index 8b9229ac..00000000 --- a/skyflow/service_account/_validity.py +++ /dev/null @@ -1,33 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from warnings import warn -import jwt -import time -from skyflow.errors._skyflow_errors import * -from skyflow._utils import InterfaceName, log_info, log_error, InfoMessages - - -def is_expired(token: str): - ''' - Check if stored token is not expired, if not return a new token, - if the token has expiry time before 5min of current time, call returns False - ''' - interface = InterfaceName.IS_EXPIRED.value - log_info(InfoMessages.IS_EXPIRED_TRIGGERED.value, interface=interface) - if len(token) == 0: - log_info(InfoMessages.EMPTY_ACCESS_TOKEN, interface=interface) - return True - - try: - decoded = jwt.decode( - token, options={"verify_signature": False, "verify_aud": False}) - if time.time() < decoded['exp']: - return False - except jwt.ExpiredSignatureError: - return True - except Exception as e: - log_error(InfoMessages.INVALID_TOKEN.value, interface=interface) - return True - - return True diff --git a/skyflow/service_account/client/__init__.py b/skyflow/service_account/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/service_account/client/auth_client.py b/skyflow/service_account/client/auth_client.py new file mode 100644 index 00000000..c1cc9cb2 --- /dev/null +++ b/skyflow/service_account/client/auth_client.py @@ -0,0 +1,18 @@ +from skyflow.generated.rest import Configuration, ApiClient +from skyflow.generated.rest.api import AuthenticationApi + + +class AuthClient: + def __init__(self, url): + self.__url = url + self.__client_configuration = self.initialize_client_configuration() + self.__api_client = self.initialize_api_client() + + def initialize_client_configuration(self): + return Configuration(host=self.__url) + + def initialize_api_client(self): + return ApiClient(self.__client_configuration) + + def get_auth_api(self): + return AuthenticationApi(self.__api_client) \ No newline at end of file diff --git a/skyflow/utils/__init__.py b/skyflow/utils/__init__.py new file mode 100644 index 00000000..786c4d9e --- 
/dev/null +++ b/skyflow/utils/__init__.py @@ -0,0 +1,6 @@ +from ..utils.enums import LogLevel, Env +from ._skyflow_messages import SkyflowMessages +from ._version import SDK_VERSION +from ._helpers import get_base_url, format_scope +from ._utils import get_credentials, get_vault_url, construct_invoke_connection_request, get_metrics, parse_insert_response, handle_exception, parse_update_record_response, parse_delete_response, parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_get_response, parse_invoke_connection_response, validate_api_key + diff --git a/skyflow/utils/_helpers.py b/skyflow/utils/_helpers.py new file mode 100644 index 00000000..97eecabc --- /dev/null +++ b/skyflow/utils/_helpers.py @@ -0,0 +1,11 @@ +from urllib.parse import urlparse + +def get_base_url(url): + parsed_url = urlparse(url) + base_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + return base_url + +def format_scope(scopes): + if not scopes: + return None + return " ".join([f"role:{scope}" for scope in scopes]) \ No newline at end of file diff --git a/skyflow/utils/_skyflow_messages.py b/skyflow/utils/_skyflow_messages.py new file mode 100644 index 00000000..afbb07ec --- /dev/null +++ b/skyflow/utils/_skyflow_messages.py @@ -0,0 +1,292 @@ +from enum import Enum + +from ._version import SDK_VERSION + +error_prefix = f"Skyflow Python SDK {SDK_VERSION}" +INFO = "INFO" +ERROR = "ERROR" + +class SkyflowMessages: + class ErrorCodes(Enum): + INVALID_INPUT = 400 + INVALID_INDEX = 404 + SERVER_ERROR = 500 + PARTIAL_SUCCESS = 500 + TOKENS_GET_COLUMN_NOT_SUPPORTED = 400 + REDACTION_WITH_TOKENS_NOT_SUPPORTED = 400 + + class Error(Enum): + EMPTY_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id." + INVALID_VAULT_ID = f"{error_prefix} Initialization failed. Invalid vault Id. Specify a valid vault Id as a string." + EMPTY_CLUSTER_ID = f"{error_prefix} Initialization failed. Invalid cluster Id for vault with id {{}}. 
Specify a valid cluster Id." + INVALID_CLUSTER_ID = f"{error_prefix} Initialization failed. Invalid cluster Id for vault with id {{}}. Specify cluster Id as a string." + INVALID_ENV = f"{error_prefix} Initialization failed. Invalid env for vault with id {{}}. Specify a valid env." + INVALID_KEY = f"{error_prefix} Initialization failed. Invalid {{}}. Specify a valid key" + VAULT_ID_NOT_IN_CONFIG_LIST = f"{error_prefix} Validation error. Vault id {{}} is missing from the config. Specify the vault id from configs." + EMPTY_VAULT_CONFIGS = f"{error_prefix} Validation error. Specify at least one vault config." + EMPTY_CONNECTION_CONFIGS = f"{error_prefix} Validation error. Specify at least one connection config." + VAULT_ID_ALREADY_EXISTS =f"{error_prefix} Initialization failed. vault with id {{}} already exists." + CONNECTION_ID_ALREADY_EXISTS = f"{error_prefix} Initialization failed. Connection with id {{}} already exists." + + EMPTY_CREDENTIALS = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Credentials must not be empty." + INVALID_CREDENTIALS_IN_CONFIG = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Specify a valid credentials." + INVALID_CREDENTIALS = f"{error_prefix} Validation error. Invalid credentials. Specify a valid credentials." + MULTIPLE_CREDENTIALS_PASSED_IN_CONFIG = f"{error_prefix} Validation error. Multiple credentials provided for {{}} with id {{}}. Please specify only one valid credential." + MULTIPLE_CREDENTIALS_PASSED = f"{error_prefix} Validation error. Multiple credentials provided. Please specify only one valid credential." + EMPTY_CREDENTIALS_STRING_IN_CONFIG = f"{error_prefix} Validation error. Invalid credentials for {{}} with id {{}}. Specify valid credentials." + EMPTY_CREDENTIALS_STRING = f"{error_prefix} Validation error. Invalid credentials. Specify valid credentials." + INVALID_CREDENTIALS_STRING_IN_CONFIG = f"{error_prefix} Validation error. 
Invalid credentials for {{}} with id {{}}. Specify credentials as a string." + INVALID_CREDENTIALS_STRING = f"{error_prefix} Validation error. Invalid credentials. Specify credentials as a string." + EMPTY_CREDENTIAL_FILE_PATH_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials for {{}} with id {{}}. Specify a valid file path." + EMPTY_CREDENTIAL_FILE_PATH = f"{error_prefix} Initialization failed. Invalid credentials. Specify a valid file path." + INVALID_CREDENTIAL_FILE_PATH_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials for {{}} with id {{}}. Expected file path to be a string." + INVALID_CREDENTIAL_FILE_PATH = f"{error_prefix} Initialization failed. Invalid credentials. Expected file path to be a string." + EMPTY_CREDENTIALS_TOKEN_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid token for {{}} with id {{}}.Specify a valid credentials token." + EMPTY_CREDENTIALS_TOKEN = f"{error_prefix} Initialization failed. Invalid token.Specify a valid credentials token." + INVALID_CREDENTIALS_TOKEN_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid credentials token for {{}} with id {{}}. Expected token to be a string." + INVALID_CREDENTIALS_TOKEN = f"{error_prefix} Initialization failed. Invalid credentials token. Expected token to be a string." + EXPIRED_TOKEN = f"${error_prefix} Initialization failed. Given token is expired. Specify a valid credentials token." + EMPTY_API_KEY_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid api key for {{}} with id {{}}.Specify a valid api key." + EMPTY_API_KEY= f"{error_prefix} Initialization failed. Invalid api key.Specify a valid api key." + INVALID_API_KEY_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid api key for {{}} with id {{}}. Expected api key to be a string." + INVALID_API_KEY = f"{error_prefix} Initialization failed. Invalid api key. Expected api key to be a string." + INVALID_ROLES_KEY_TYPE_IN_CONFIG = f"{error_prefix} Validation error. 
Invalid roles for {{}} with id {{}}. Specify roles as an array." + INVALID_ROLES_KEY_TYPE = f"{error_prefix} Validation error. Invalid roles. Specify roles as an array." + EMPTY_ROLES_IN_CONFIG = f"{error_prefix} Validation error. Invalid roles for {{}} with id {{}}. Specify at least one role." + EMPTY_ROLES = f"{error_prefix} Validation error. Invalid roles. Specify at least one role." + EMPTY_CONTEXT_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid context provided for {{}} with id {{}}. Specify context as type Context." + EMPTY_CONTEXT = f"{error_prefix} Initialization failed. Invalid context provided. Specify context as type Context." + INVALID_CONTEXT_IN_CONFIG = f"{error_prefix} Initialization failed. Invalid context for {{}} with id {{}}. Specify a valid context." + INVALID_CONTEXT = f"{error_prefix} Initialization failed. Invalid context. Specify a valid context." + INVALID_LOG_LEVEL = f"{error_prefix} Initialization failed. Invalid log level. Specify a valid log level." + EMPTY_LOG_LEVEL = f"{error_prefix} Initialization failed. Specify a valid log level." + + EMPTY_CONNECTION_ID = f"{error_prefix} Initialization failed. Invalid connection Id. Specify a valid connection Id." + INVALID_CONNECTION_ID = f"{error_prefix} Initialization failed. Invalid connection Id. Specify connection Id as a string." + EMPTY_CONNECTION_URL = f"{error_prefix} Initialization failed. Invalid connection Url for connection with id {{}}. Specify a valid connection Url." + INVALID_CONNECTION_URL = f"{error_prefix} Initialization failed. Invalid connection Url for connection with id {{}}. Specify connection Url as a string." + CONNECTION_ID_NOT_IN_CONFIG_LIST = f"{error_prefix} Validation error. {{}} is missing from the config. Specify the connectionIds from config." + RESPONSE_NOT_JSON = f"{error_prefix} Response {{}} is not valid JSON." + API_ERROR = f"{error_prefix} Server returned status code {{}}" + + MISSING_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. 
Table name cannot be empty in insert request. Specify a table name." + INVALID_TABLE_NAME_IN_INSERT = f"{error_prefix} Validation error. Invalid table name in insert request. Specify a valid table name." + INVALID_TYPE_OF_DATA_IN_INSERT = f"{error_prefix} Validation error. Invalid type of data in insert request. Specify data as a object array." + EMPTY_DATA_IN_INSERT = f"{error_prefix} Validation error. Data array cannot be empty. Specify data in insert request." + INVALID_UPSERT_OPTIONS_TYPE = f"{error_prefix} Validation error. 'upsert' key cannot be empty in options. At least one object of table and column is required." + INVALID_HOMOGENEOUS_TYPE = f"{error_prefix} Validation error. Invalid type of homogeneous. Specify homogeneous as a string." + INVALID_TOKEN_MODE_TYPE = f"{error_prefix} Validation error. Invalid type of token mode. Specify token mode as a TokenMode enum." + INVALID_RETURN_TOKENS_TYPE = f"{error_prefix} Validation error. Invalid type of return tokens. Specify return tokens as a boolean." + INVALID_CONTINUE_ON_ERROR_TYPE = f"{error_prefix} Validation error. Invalid type of continue on error. Specify continue on error as a boolean." + TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE = f"{error_prefix} Validation error. 'token_mode' wasn't specified. Set 'token_mode' to 'ENABLE' to insert tokens." + INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT = f"{error_prefix} Validation error. 'token_mode' is set to 'ENABLE_STRICT', but some fields are missing tokens. Specify tokens for all fields." + NO_TOKENS_IN_INSERT = f"{error_prefix} Validation error. Tokens weren't specified for records while 'token_strict' was {{}}. Specify tokens." + BATCH_INSERT_FAILURE = f"{error_prefix} Insert operation failed." + GET_FAILURE = f"{error_prefix} Get operation failed." + HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT = f"{error_prefix} Validation error. Homogenous is not supported when upsert is passed." + + EMPTY_TABLE_VALUE = f"{error_prefix} Validation error. 
'table' can't be empty. Specify a table." + INVALID_TABLE_VALUE = f"{error_prefix} Validation error. Invalid type of table. Specify table as a string" + EMPTY_RECORD_IDS_IN_DELETE = f"{error_prefix} Validation error. 'record ids' array can't be empty. Specify one or more record ids." + BULK_DELETE_FAILURE = f"{error_prefix} Delete operation failed." + + INVALID_QUERY_TYPE = f"{error_prefix} Validation error. Query parameter is of type {{}}. Specify as a string." + EMPTY_QUERY = f"{error_prefix} Validation error. Query parameter can't be empty. Specify as a string." + INVALID_QUERY_COMMAND = f"{error_prefix} Validation error. {{}} command was passed instead, but only SELECT commands are supported. Specify the SELECT command." + SERVER_ERROR = f"{error_prefix} Validation error. Check SkyflowError.data for details." + QUERY_FAILED = f"{error_prefix} Query operation failed." + DETOKENIZE_FIELD = f"{error_prefix} Detokenize operation failed." + UPDATE_FAILED = f"{error_prefix} Update operation failed." + TOKENIZE_FAILED = f"{error_prefix} Tokenize operation failed." + INVOKE_CONNECTION_FAILED = f"{error_prefix} Invoke Connection operation failed." + + INVALID_IDS_TYPE = f"{error_prefix} Validation error. 'ids' has a value of type {{}}. Specify 'ids' as list." + INVALID_REDACTION_TYPE = f"{error_prefix} Validation error. 'redaction' has a value of type {{}}. Specify 'redaction' as type Skyflow.Redaction." + INVALID_COLUMN_NAME = f"{error_prefix} Validation error. 'column' has a value of type {{}}. Specify 'column' as a string." + INVALID_COLUMN_VALUE = f"{error_prefix} Validation error. columnValues key has a value of type {{}}. Specify columnValues key as list." + INVALID_FIELDS_VALUE = f"{error_prefix} Validation error. fields key has a value of type{{}}. Specify fields key as list." + BOTH_OFFSET_AND_LIMIT_SPECIFIED = f"${error_prefix} Validation error. 
Both offset and limit cannot be present at the same time" + INVALID_OFF_SET_VALUE = f"{error_prefix} Validation error. offset key has a value of type {{}}. Specify offset key as integer." + INVALID_LIMIT_VALUE = f"{error_prefix} Validation error. limit key has a value of type {{}}. Specify limit key as integer." + INVALID_DOWNLOAD_URL_VALUE = f"{error_prefix} Validation error. download_url key has a value of type {{}}. Specify download_url key as boolean." + REDACTION_WITH_TOKENS_NOT_SUPPORTED = f"{error_prefix} Validation error. 'redaction' can't be used when tokens are specified. Remove 'redaction' from payload if tokens are specified." + TOKENS_GET_COLUMN_NOT_SUPPORTED = f"{error_prefix} Validation error. Column name and/or column values can't be used when tokens are specified. Remove unique column values or tokens from the payload." + BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED = f"{error_prefix} Validation error. Both Skyflow IDs and column details can't be specified. Either specify Skyflow IDs or unique column details." + INVALID_ORDER_BY_VALUE = f"{error_prefix} Validation error. order_by key has a value of type {{}}. Specify order_by key as Skyflow.OrderBy" + + UPDATE_FIELD_KEY_ERROR = f"{error_prefix} Validation error. Fields are empty in an update payload. Specify at least one field." + INVALID_FIELDS_TYPE = f"{error_prefix} Validation error. The 'data' key has a value of type {{}}. Specify 'data' as a dictionary." + IDS_KEY_ERROR = f"{error_prefix} Validation error. 'ids' key is missing from the payload. Specify an 'ids' key." + INVALID_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. The 'tokens' key has a value of type {{}}. Specify 'tokens' as a list." + EMPTY_TOKENS_LIST_VALUE = f"{error_prefix} Validation error. Tokens are empty in detokenize payload. Specify at lease one token" + + INVALID_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. The 'tokenize_parameters' key has a value of type {{}}. Specify 'tokenize_parameters' as a list." 
+ EMPTY_TOKENIZE_PARAMETERS = f"{error_prefix} Validation error. Tokenize parameters are empty in tokenize payload. Specify at least one parameter." + INVALID_TOKENIZE_PARAMETER = f"{error_prefix} Validation error. Tokenize parameter at index {{}} has a value of type {{}}. Specify as a dictionary." + EMPTY_TOKENIZE_PARAMETER_VALUE = f"{error_prefix} Validation error. Tokenize parameter value at index {{}} is empty. Specify a valid value." + EMPTY_TOKENIZE_PARAMETER_COLUMN_GROUP = f"{error_prefix} Validation error. Tokenize parameter column group at index {{}} is empty. Specify a valid column group." + INVALID_TOKENIZE_PARAMETER_KEY = f"{error_prefix} Validation error. Tokenize parameter key at index {{}} is invalid. Specify a valid key value." + + INVALID_REQUEST_BODY = f"{error_prefix} Validation error. Invalid request body. Specify the request body as an object." + INVALID_REQUEST_HEADERS = f"{error_prefix} Validation error. Invalid request headers. Specify the request as an object." + INVALID_URL = f"{error_prefix} Validation error. Connection url {{}} is invalid. Specify a valid connection url." + INVALID_PATH_PARAMS = f"{error_prefix} Validation error. Path parameters aren't valid. Specify valid path parameters." + INVALID_QUERY_PARAMS = f"{error_prefix} Validation error. Query parameters aren't valid. Specify valid query parameters." + + MISSING_PRIVATE_KEY = f"{error_prefix} Initialization failed. Unable to read private key in credentials. Verify your private key." + MISSING_CLIENT_ID = f"{error_prefix} Initialization failed. Unable to read client ID in credentials. Verify your client ID." + MISSING_KEY_ID = f"{error_prefix} Initialization failed. Unable to read key ID in credentials. Verify your key ID." + MISSING_TOKEN_URI = f"{error_prefix} Initialization failed. Unable to read token URI in credentials. Verify your token URI." + JWT_INVALID_FORMAT = f"{error_prefix} Initialization failed. Invalid private key format. Verify your credentials." 
+ JWT_DECODE_ERROR = f"{error_prefix} Validation error. Invalid access token. Verify your credentials." + FILE_INVALID_JSON = f"{error_prefix} Initialization failed. File at {{}} is not in valid JSON format. Verify the file contents." + INVALID_JSON_FORMAT_IN_CREDENTIALS_ENV = f"{error_prefix} Validation error. Invalid JSON format in SKYFLOW_CREDENTIALS environment variable." + + class Info(Enum): + CLIENT_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized skyflow client." + VALIDATING_VAULT_CONFIG = f"{INFO}: [{error_prefix}] Validating vault config." + VALIDATING_CONNECTION_CONFIG = f"{INFO}: [{error_prefix}] Validating connection config." + UNABLE_TO_GENERATE_SDK_METRIC = f"{INFO}: [{error_prefix}] Unable to generate {{}} metric." + VAULT_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized vault controller with vault ID {{}}." + CONNECTION_CONTROLLER_INITIALIZED = f"{INFO}: [{error_prefix}] Initialized connection controller with connection ID {{}}." + VAULT_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} already exists." + VAULT_CONFIG_DOES_NOT_EXIST = f"{INFO}: [{error_prefix}] Vault config with vault ID {{}} doesn't exist." + CONNECTION_CONFIG_EXISTS = f"{INFO}: [{error_prefix}] Connection config with connection ID {{}} already exists." + CONNECTION_CONFIG_DOES_NOT_EXIST = f"{INFO}: [{error_prefix}] Connection config with connection ID {{}} doesn't exist." + LOGGER_SETUP_DONE = f"{INFO}: [{error_prefix}] Set up logger." + CURRENT_LOG_LEVEL = f"{INFO}: [{error_prefix}] Current log level is {{}}." + + BEARER_TOKEN_EXPIRED = f"{INFO}: [{error_prefix}] Bearer token is expired." + GET_BEARER_TOKEN_TRIGGERED = f"{INFO}: [{error_prefix}] generate_bearer_token method triggered." + GET_BEARER_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Bearer token generated." + GET_SIGNED_DATA_TOKENS_TRIGGERED = f"{INFO}: [{error_prefix}] generate_signed_data_tokens method triggered." 
+ GET_SIGNED_DATA_TOKEN_SUCCESS = f"{INFO}: [{error_prefix}] Signed data tokens generated." + GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED = f"{INFO}: [{error_prefix}] generate bearer_token_from_credential_string method triggered ." + REUSE_BEARER_TOKEN = f"{INFO}: [{error_prefix}] Reusing bearer token." + + + VALIDATE_INSERT_REQUEST = f"{INFO}: [{error_prefix}] Validating insert request." + INSERT_TRIGGERED = f"{INFO}: [{error_prefix}] Insert method triggered." + INSERT_SUCCESS = f"{INFO}: [{error_prefix}] Data inserted." + INSERT_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Insert request resolved." + + VALIDATE_UPDATE_REQUEST = f"{INFO}: [{error_prefix}] Validating update request." + UPDATE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Update request resolved." + UPDATE_SUCCESS = f"{INFO}: [{error_prefix}] Data updated." + UPDATE_TRIGGERED = f"{INFO}: [{error_prefix}] Update method triggered." + + DELETE_TRIGGERED = f"{INFO}: [{error_prefix}] Delete method triggered." + VALIDATING_DELETE_REQUEST = f"{INFO}: [{error_prefix}] Validating delete request." + DELETE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Delete request resolved." + DELETE_SUCCESS = f"{INFO}: [{error_prefix}] Data deleted." + + GET_TRIGGERED = f"{INFO}: [{error_prefix}] Get method triggered." + VALIDATE_GET_REQUEST = f"{INFO}: [{error_prefix}] Validating get request." + GET_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Get request resolved." + GET_SUCCESS = f"{INFO}: [{error_prefix}] Data revealed." + + QUERY_TRIGGERED = f"{INFO}: [{error_prefix}] Query method triggered." + VALIDATING_QUERY_REQUEST = f"{INFO}: [{error_prefix}] Validating query request." + QUERY_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Query request resolved." + QUERY_SUCCESS = f"{INFO}: [{error_prefix}] Query executed." + + DETOKENIZE_TRIGGERED = f"{INFO}: [{error_prefix}] Detokenize method triggered." + VALIDATE_DETOKENIZE_REQUEST = f"{INFO}: [{error_prefix}] Validating detokenize request." 
+ DETOKENIZE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Detokenize request resolved." + DETOKENIZE_SUCCESS = f"{INFO}: [{error_prefix}] Data detokenized." + + TOKENIZE_TRIGGERED = f"{INFO}: [{error_prefix}] Tokenize method triggered." + VALIDATING_TOKENIZE_REQUEST = f"{INFO}: [{error_prefix}] Validating tokenize request." + TOKENIZE_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Tokenize request resolved." + TOKENIZE_SUCCESS = f"{INFO}: [{error_prefix}] Data tokenized." + + INVOKE_CONNECTION_TRIGGERED = f"{INFO}: [{error_prefix}] Invoke connection method triggered." + VALIDATING_INVOKE_CONNECTION_REQUEST = f"{INFO}: [{error_prefix}] Validating invoke connection request." + INVOKE_CONNECTION_REQUEST_RESOLVED = f"{INFO}: [{error_prefix}] Invoke connection request resolved." + INVOKE_CONNECTION_SUCCESS = f"{INFO}: [{error_prefix}] Invoke Connection Success." + + class ErrorLogs(Enum): + VAULTID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID is required." + EMPTY_VAULTID = f"{ERROR}: [{error_prefix}] Invalid vault config. Vault ID can not be empty." + CLUSTER_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Cluster ID is required." + EMPTY_CLUSTER_ID = f"{ERROR}: [{error_prefix}] Invalid vault config. Cluster ID can not be empty." + ENV_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid vault config. Env is required." + CONNECTION_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection ID is required." + EMPTY_CONNECTION_ID = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection ID can not be empty." + CONNECTION_URL_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL is required." + EMPTY_CONNECTION_URL = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL can not be empty." + INVALID_CONNECTION_URL = f"{ERROR}: [{error_prefix}] Invalid connection config. Connection URL is not a valid URL." 
+ EMPTY_CREDENTIALS_PATH = f"{ERROR}: [{error_prefix}] Invalid credentials. Credentials path can not be empty." + EMPTY_CREDENTIALS_STRING = f"{ERROR}: [{error_prefix}] Invalid credentials. Credentials string can not be empty." + EMPTY_TOKEN_VALUE = f"{ERROR}: [{error_prefix}] Invalid credentials. Token can not be empty." + EMPTY_API_KEY_VALUE = f"{ERROR}: [{error_prefix}] Invalid credentials. Api key can not be empty." + INVALID_API_KEY = f"{ERROR}: [{error_prefix}] Invalid credentials. Api key is invalid." + + INVALID_BEARER_TOKEN = f"{ERROR}: [{error_prefix}] Bearer token is invalid or expired." + INVALID_CREDENTIALS_FILE = f"{ERROR}: [{error_prefix}] Credentials file is either null or an invalid file." + INVALID_CREDENTIALS_STRING_FORMAT = f"{ERROR}: [{error_prefix}] Credentials string is not in a valid JSON string format." + PRIVATE_KEY_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Private key is required." + CLIENT_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Client ID is required." + KEY_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Key ID is required." + TOKEN_URI_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Token URI is required." + INVALID_TOKEN_URI = f"{ERROR}: [{error_prefix}] Invalid value for token URI in credentials." + + + TABLE_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Table is required." + EMPTY_TABLE_NAME =f"{ERROR}: [{error_prefix}] Invalid {{}} request. Table name can not be empty." + VALUES_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Values are required." + EMPTY_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Values can not be empty." + EMPTY_OR_NULL_VALUE_IN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Value can not be null or empty in values for key {{}}." + EMPTY_OR_NULL_KEY_IN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Key can not be null or empty in values." + EMPTY_UPSERT = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Upsert can not be empty."
+ HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Homogenous is not supported when upsert is passed." + EMPTY_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens can not be empty." + EMPTY_OR_NULL_VALUE_IN_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Value can not be null or empty in tokens for key {{}}." + EMPTY_OR_NULL_KEY_IN_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Key can not be null or empty in tokens." + MISMATCH_OF_FIELDS_AND_TOKENS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Keys for values and tokens are not matching." + + EMPTY_IDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Ids can not be empty." + EMPTY_OR_NULL_ID_IN_IDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Id can not be null or empty in ids at index {{}}." + TOKENIZATION_NOT_SUPPORTED_WITH_REDACTION= f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokenization is not supported when redaction is applied." + TOKENIZATION_SUPPORTED_ONLY_WITH_IDS=f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokenization is not supported when column name and values are passed." + TOKENS_NOT_ALLOWED_WITH_BYOT_DISABLE = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens are not allowed when token_strict is DISABLE." + INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT =f"{ERROR}: [{error_prefix}] Invalid {{}} request. For tokenStrict as ENABLE_STRICT, tokens should be passed for all fields." + TOKENS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Tokens are required." + EMPTY_FIELDS = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Fields can not be empty." + EMPTY_OFFSET = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Offset can not be empty." + NEITHER_IDS_NOR_COLUMN_NAME_PASSED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Neither ids nor column name and values are passed." + BOTH_IDS_AND_COLUMN_NAME_PASSED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. 
Both ids and column name and values are passed." + COLUMN_NAME_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column name is required when column values are passed." + COLUMN_VALUES_IS_REQUIRED_GET = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column values are required when column name is passed." + SKYFLOW_ID_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Skyflow Id is required." + EMPTY_SKYFLOW_ID = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Skyflow Id can not be empty." + + COLUMN_VALUES_IS_REQUIRED_TOKENIZE = f"{ERROR}: [{error_prefix}] Invalid {{}} request. ColumnValues are required." + EMPTY_COLUMN_GROUP_IN_COLUMN_VALUES = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Column group can not be null or empty in column values at index %s2." + + EMPTY_QUERY= f"{ERROR}: [{error_prefix}] Invalid {{}} request. Query can not be empty." + QUERY_IS_REQUIRED = f"{ERROR}: [{error_prefix}] Invalid {{}} request. Query is required." + + INSERT_RECORDS_REJECTED = f"{ERROR}: [{error_prefix}] Insert call resulted in failure." + DETOKENIZE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Detokenize request resulted in failure." + DELETE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Delete request resulted in failure." + TOKENIZE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Tokenize request resulted in failure." + UPDATE_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Update request resulted in failure." + QUERY_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Query request resulted in failure." + GET_REQUEST_REJECTED = f"{ERROR}: [{error_prefix}] Get request resulted in failure." 
+ + class Interface(Enum): + INSERT = "INSERT" + GET = "GET" + QUERY = "QUERY" + DETOKENIZE = "DETOKENIZE" + TOKENIZE = "TOKENIZE" + UPDATE = "UPDATE" + DELETE = "DELETE" + + + class Warning(Enum): + WARNING_MESSAGE = "WARNING MESSAGE" + + + diff --git a/skyflow/utils/_utils.py b/skyflow/utils/_utils.py new file mode 100644 index 00000000..fa17a70e --- /dev/null +++ b/skyflow/utils/_utils.py @@ -0,0 +1,379 @@ +import os +import json +import urllib.parse +from dotenv import load_dotenv +from requests.sessions import PreparedRequest +from requests.models import HTTPError +import requests +import platform +import sys +import re +from skyflow.error import SkyflowError +from skyflow.generated.rest import V1UpdateRecordResponse, V1BulkDeleteRecordResponse, \ + V1DetokenizeResponse, V1TokenizeResponse, V1GetQueryResponse, V1BulkGetRecordResponse +from skyflow.utils.logger import log_error, log_error_log + +from . import SkyflowMessages, SDK_VERSION +from .enums import Env, ContentType, EnvUrls +from skyflow.vault.data import InsertResponse, UpdateResponse, DeleteResponse, QueryResponse, GetResponse +from .validations import validate_invoke_connection_params +from ..vault.connection import InvokeConnectionResponse +from ..vault.tokens import DetokenizeResponse, TokenizeResponse + +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def get_credentials(config_level_creds = None, common_skyflow_creds = None, logger = None): + dotenv_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ".env") + if dotenv_path: + load_dotenv(dotenv_path) + env_skyflow_credentials = os.getenv("SKYFLOW_CREDENTIALS") + if config_level_creds: + return config_level_creds + if common_skyflow_creds: + return common_skyflow_creds + if env_skyflow_credentials: + env_skyflow_credentials = env_skyflow_credentials.strip() + try: + env_creds = env_skyflow_credentials.replace('\n', '\\n') + return { + 'credentials_string': env_creds + } + except json.JSONDecodeError: + raise 
SkyflowError(SkyflowMessages.Error.INVALID_JSON_FORMAT_IN_CREDENTIALS_ENV.value, invalid_input_error_code) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_CREDENTIALS.value, invalid_input_error_code) + +def validate_api_key(api_key: str, logger = None) -> bool: + if len(api_key) != 42: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_API_KEY.value, logger = logger) + return False + api_key_pattern = re.compile(r'^sky-[a-zA-Z0-9]{5}-[a-fA-F0-9]{32}$') + + return bool(api_key_pattern.match(api_key)) + +def get_vault_url(cluster_id, env,vault_id, logger = None): + if not cluster_id or not isinstance(cluster_id, str) or not cluster_id.strip(): + raise SkyflowError(SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id), invalid_input_error_code) + + if env not in Env: + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + base_url = EnvUrls[env.name].value + protocol = "https" + + return f"{protocol}://{cluster_id}.{base_url}" + +def parse_path_params(url, path_params): + result = url + for param, value in path_params.items(): + result = result.replace('{' + param + '}', value) + + return result + +def to_lowercase_keys(dict): + result = {} + for key, value in dict.items(): + result[key.lower()] = value + + return result + +def construct_invoke_connection_request(request, connection_url, logger) -> PreparedRequest: + url = parse_path_params(connection_url.rstrip('/'), request.path_params) + + try: + if isinstance(request.headers, dict): + header = to_lowercase_keys(json.loads( + json.dumps(request.headers))) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value, invalid_input_error_code) + except Exception: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value, invalid_input_error_code) + + if not 'Content-Type'.lower() in header: + header['content-type'] = ContentType.JSON.value + + try: + if isinstance(request.body, 
dict): + json_data, files = get_data_from_content_type( + request.body, header["content-type"] + ) + else: + raise SkyflowError(SkyflowMessages.Error.INVALID_REQUEST_BODY.value, invalid_input_error_code) + except Exception as e: + raise SkyflowError( SkyflowMessages.Error.INVALID_REQUEST_BODY.value, invalid_input_error_code) + + validate_invoke_connection_params(logger, request.query_params, request.path_params) + + try: + return requests.Request( + method = request.method.value, + url = url, + data = json_data, + headers = header, + params = request.query_params, + files = files + ).prepare() + except requests.exceptions.InvalidURL: + raise SkyflowError(SkyflowMessages.Error.INVALID_URL.value.format(connection_url), invalid_input_error_code) + + +def http_build_query(data): + return urllib.parse.urlencode(r_urlencode(list(), dict(), data)) + +def r_urlencode(parents, pairs, data): + if isinstance(data, list) or isinstance(data, tuple): + for i in range(len(data)): + parents.append(i) + r_urlencode(parents, pairs, data[i]) + parents.pop() + elif isinstance(data, dict): + for key, value in data.items(): + parents.append(key) + r_urlencode(parents, pairs, value) + parents.pop() + else: + pairs[render_key(parents)] = str(data) + + return pairs + +def render_key(parents): + depth, out_str = 0, '' + for x in parents: + s = "[%s]" if depth > 0 or isinstance(x, int) else "%s" + out_str += s % str(x) + depth += 1 + return out_str + +def get_data_from_content_type(data, content_type): + converted_data = data + files = {} + if content_type == ContentType.URLENCODED.value: + converted_data = http_build_query(data) + elif content_type == ContentType.FORMDATA.value: + converted_data = r_urlencode(list(), dict(), data) + files = {(None, None)} + elif content_type == ContentType.JSON.value: + converted_data = json.dumps(data) + + return converted_data, files + + +def get_metrics(): + sdk_name_version = "skyflow-python@" + SDK_VERSION + + try: + sdk_client_device_model = 
platform.node() + except Exception: + sdk_client_device_model = "" + + try: + sdk_client_os_details = sys.platform + except Exception: + sdk_client_os_details = "" + + try: + sdk_runtime_details = sys.version + except Exception: + sdk_runtime_details = "" + + details_dic = { + 'sdk_name_version': sdk_name_version, + 'sdk_client_device_model': sdk_client_device_model, + 'sdk_client_os_details': sdk_client_os_details, + 'sdk_runtime_details': "Python " + sdk_runtime_details, + } + return details_dic + + +def parse_insert_response(api_response, continue_on_error): + inserted_fields = [] + errors = [] + insert_response = InsertResponse() + if continue_on_error: + for idx, response in enumerate(api_response.responses): + if response['Status'] == 200: + body = response['Body'] + if 'records' in body: + for record in body['records']: + inserted_field = { + 'skyflow_id': record['skyflow_id'], + 'request_index': idx + } + + if 'tokens' in record: + inserted_field.update(record['tokens']) + inserted_fields.append(inserted_field) + elif response['Status'] == 400: + error = { + 'request_index': idx, + 'error': response['Body']['error'] + } + errors.append(error) + + insert_response.inserted_fields = inserted_fields + insert_response.errors = errors + + else: + for record in api_response.records: + field_data = { + 'skyflow_id': record.skyflow_id + } + + if record.tokens: + field_data.update(record.tokens) + + inserted_fields.append(field_data) + insert_response.inserted_fields = inserted_fields + + return insert_response + +def parse_update_record_response(api_response: V1UpdateRecordResponse): + update_response = UpdateResponse() + updated_field = dict() + updated_field['skyflow_id'] = api_response.skyflow_id + if api_response.tokens is not None: + updated_field.update(api_response.tokens) + + update_response.updated_field = updated_field + + return update_response + +def parse_delete_response(api_response: V1BulkDeleteRecordResponse): + delete_response = DeleteResponse() + 
deleted_ids = api_response.record_id_response + delete_response.deleted_ids = deleted_ids + delete_response.errors = [] + return delete_response + + +def parse_get_response(api_response: V1BulkGetRecordResponse): + get_response = GetResponse() + data = [] + errors = [] + for record in api_response.records: + field_data = {field: value for field, value in record.fields.items()} + data.append(field_data) + + get_response.data = data + get_response.errors = errors + + return get_response + +def parse_detokenize_response(api_response: V1DetokenizeResponse): + detokenized_fields = [] + errors = [] + + for record in api_response.records: + if record.error: + errors.append({ + "token": record.token, + "error": record.error + }) + else: + value_type = record.value_type.value if record.value_type else None + detokenized_fields.append({ + "token": record.token, + "value": record.value, + "type": value_type + }) + + detokenized_fields = detokenized_fields + errors = errors + detokenize_response = DetokenizeResponse() + detokenize_response.detokenized_fields = detokenized_fields + detokenize_response.errors = errors + + return detokenize_response + +def parse_tokenize_response(api_response: V1TokenizeResponse): + tokenize_response = TokenizeResponse() + tokenized_fields = [{"token": record.token} for record in api_response.records] + + tokenize_response.tokenized_fields = tokenized_fields + + return tokenize_response + +def parse_query_response(api_response: V1GetQueryResponse): + query_response = QueryResponse() + fields = [] + for record in api_response.records: + field_object = { + **record.fields, + "tokenized_data": {} + } + fields.append(field_object) + query_response.fields = fields + return query_response + +def parse_invoke_connection_response(api_response: requests.Response): + invoke_connection_response = InvokeConnectionResponse() + + status_code = api_response.status_code + content = api_response.content + if isinstance(content, bytes): + content = 
content.decode('utf-8') + try: + api_response.raise_for_status() + try: + json_content = json.loads(content) + if 'x-request-id' in api_response.headers: + request_id = api_response.headers['x-request-id'] + json_content['request_id'] = request_id + + invoke_connection_response.response = json_content + return invoke_connection_response + except: + raise SkyflowError(SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content), status_code) + except HTTPError: + message = SkyflowMessages.Error.API_ERROR.value.format(status_code) + if api_response and api_response.content: + try: + error_response = json.loads(content) + if isinstance(error_response.get('error'), dict) and 'message' in error_response['error']: + message = error_response['error']['message'] + except json.JSONDecodeError: + message = SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format(content) + + if 'x-request-id' in api_response.headers: + message += ' - request id: ' + api_response.headers['x-request-id'] + + raise SkyflowError(message, status_code) + + +def log_and_reject_error(description, status_code, request_id, http_status=None, grpc_code=None, details=None, logger = None): + log_error(description, status_code, request_id, grpc_code, http_status, details, logger= logger) + +def handle_exception(error, logger): + request_id = error.headers.get('x-request-id', 'unknown-request-id') + content_type = error.headers.get('content-type') + data = error.body + + if content_type: + if 'application/json' in content_type: + handle_json_error(error, data, request_id, logger) + elif 'text/plain' in content_type: + handle_text_error(error, data, request_id, logger) + else: + handle_generic_error(error, request_id, logger) + else: + handle_generic_error(error, request_id, logger) + +def handle_json_error(err, data, request_id, logger): + try: + description = json.loads(data) + status_code = description.get('error', {}).get('http_code', 500) # Default to 500 if not found + http_status = 
description.get('error', {}).get('http_status') + grpc_code = description.get('error', {}).get('grpc_code') + details = description.get('error', {}).get('details') + + description_message = description.get('error', {}).get('message', "An unknown error occurred.") + log_and_reject_error(description_message, status_code, request_id, http_status, grpc_code, details, logger = logger) + except json.JSONDecodeError: + log_and_reject_error("Invalid JSON response received.", err.status, request_id, logger = logger) + +def handle_text_error(err, data, request_id, logger): + log_and_reject_error(data, err.status, request_id, logger = logger) + +def handle_generic_error(err, request_id, logger): + description = "An error occurred." + log_and_reject_error(description, err.status, request_id, logger = logger) diff --git a/skyflow/utils/_version.py b/skyflow/utils/_version.py new file mode 100644 index 00000000..5d58569b --- /dev/null +++ b/skyflow/utils/_version.py @@ -0,0 +1 @@ +SDK_VERSION = '1.15.1.dev0+7d27637' \ No newline at end of file diff --git a/skyflow/utils/enums/__init__.py b/skyflow/utils/enums/__init__.py new file mode 100644 index 00000000..5456737b --- /dev/null +++ b/skyflow/utils/enums/__init__.py @@ -0,0 +1,6 @@ +from .env import Env, EnvUrls +from .log_level import LogLevel +from .content_types import ContentType +from .token_mode import TokenMode +from .request_method import RequestMethod +from .redaction_type import RedactionType \ No newline at end of file diff --git a/skyflow/utils/enums/content_types.py b/skyflow/utils/enums/content_types.py new file mode 100644 index 00000000..362c286a --- /dev/null +++ b/skyflow/utils/enums/content_types.py @@ -0,0 +1,8 @@ +from enum import Enum + +class ContentType(Enum): + JSON = 'application/json' + PLAINTEXT = 'text/plain' + XML = 'text/xml' + URLENCODED = 'application/x-www-form-urlencoded' + FORMDATA = 'multipart/form-data' \ No newline at end of file diff --git a/skyflow/utils/enums/env.py 
b/skyflow/utils/enums/env.py new file mode 100644 index 00000000..862f8f8a --- /dev/null +++ b/skyflow/utils/enums/env.py @@ -0,0 +1,13 @@ +from enum import Enum + +class Env(Enum): + DEV = 'DEV' + SANDBOX = 'SANDBOX' + PROD = 'PROD' + STAGE = 'STAGE' + +class EnvUrls(Enum): + PROD = "vault.skyflowapis.com" + SANDBOX = "vault.skyflowapis-preview.com" + DEV = "vault.skyflowapis.dev" + STAGE = "vault.skyflowapis.tech" \ No newline at end of file diff --git a/skyflow/utils/enums/log_level.py b/skyflow/utils/enums/log_level.py new file mode 100644 index 00000000..c92e9149 --- /dev/null +++ b/skyflow/utils/enums/log_level.py @@ -0,0 +1,8 @@ +from enum import Enum + +class LogLevel(Enum): + DEBUG = 1 + INFO = 2 + WARN = 3 + ERROR = 4 + OFF = 5 diff --git a/skyflow/utils/enums/redaction_type.py b/skyflow/utils/enums/redaction_type.py new file mode 100644 index 00000000..85310048 --- /dev/null +++ b/skyflow/utils/enums/redaction_type.py @@ -0,0 +1,8 @@ +from enum import Enum +from skyflow.generated.rest import RedactionEnumREDACTION + +class RedactionType(Enum): + PLAIN_TEXT = RedactionEnumREDACTION.PLAIN_TEXT + MASKED = RedactionEnumREDACTION.MASKED + DEFAULT = RedactionEnumREDACTION.DEFAULT + REDACTED = RedactionEnumREDACTION.REDACTED diff --git a/skyflow/utils/enums/request_method.py b/skyflow/utils/enums/request_method.py new file mode 100644 index 00000000..61efef3d --- /dev/null +++ b/skyflow/utils/enums/request_method.py @@ -0,0 +1,8 @@ +from enum import Enum + +class RequestMethod(Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + NONE = "NONE" \ No newline at end of file diff --git a/skyflow/utils/enums/token_mode.py b/skyflow/utils/enums/token_mode.py new file mode 100644 index 00000000..650f9a96 --- /dev/null +++ b/skyflow/utils/enums/token_mode.py @@ -0,0 +1,7 @@ +from enum import Enum +from skyflow.generated.rest import V1BYOT + +class TokenMode(Enum): + DISABLE = V1BYOT.DISABLE + ENABLE = V1BYOT.ENABLE + ENABLE_STRICT = 
V1BYOT.ENABLE_STRICT \ No newline at end of file diff --git a/skyflow/utils/logger/__init__.py b/skyflow/utils/logger/__init__.py new file mode 100644 index 00000000..2993b8fc --- /dev/null +++ b/skyflow/utils/logger/__init__.py @@ -0,0 +1,2 @@ +from ._logger import Logger +from ._log_helpers import log_error, log_info, log_error_log \ No newline at end of file diff --git a/skyflow/utils/logger/_log_helpers.py b/skyflow/utils/logger/_log_helpers.py new file mode 100644 index 00000000..fdb11ea9 --- /dev/null +++ b/skyflow/utils/logger/_log_helpers.py @@ -0,0 +1,34 @@ +from ..enums import LogLevel +from . import Logger + + +def log_info(message, logger = None): + if not logger: + logger = Logger(LogLevel.INFO) + + logger.info(message) + +def log_error_log(message, logger=None): + if not logger: + logger = Logger(LogLevel.ERROR) + logger.error(message) + +def log_error(message, http_code, request_id=None, grpc_code=None, http_status=None, details=None, logger=None): + if not logger: + logger = Logger(LogLevel.ERROR) + + log_data = { + 'http_code': http_code, + 'message': message + } + + if grpc_code is not None: + log_data['grpc_code'] = grpc_code + if http_status is not None: + log_data['http_status'] = http_status + if request_id is not None: + log_data['request_id'] = request_id + if details is not None: + log_data['details'] = details + + logger.error(log_data) \ No newline at end of file diff --git a/skyflow/utils/logger/_logger.py b/skyflow/utils/logger/_logger.py new file mode 100644 index 00000000..45519fb1 --- /dev/null +++ b/skyflow/utils/logger/_logger.py @@ -0,0 +1,50 @@ +import logging +from ..enums.log_level import LogLevel + + +class Logger: + def __init__(self, level=LogLevel.ERROR): + self.current_level = level + self.logger = logging.getLogger('skyflow-python') + self.logger.propagate = False # Prevent logs from being handled by parent loggers + + # Remove any existing handlers to avoid duplicates or inherited handlers + if self.logger.hasHandlers(): 
+ self.logger.handlers.clear() + + self.set_log_level(level) + + handler = logging.StreamHandler() + + # Create a formatter that only includes the message without any prefixes + formatter = logging.Formatter('%(message)s') + handler.setFormatter(formatter) + + self.logger.addHandler(handler) + + def set_log_level(self, level): + self.current_level = level + log_level_mapping = { + LogLevel.DEBUG: logging.DEBUG, + LogLevel.INFO: logging.INFO, + LogLevel.WARN: logging.WARNING, + LogLevel.ERROR: logging.ERROR, + LogLevel.OFF: logging.CRITICAL + 1 + } + self.logger.setLevel(log_level_mapping[level]) + + def debug(self, message): + if self.current_level.value <= LogLevel.DEBUG.value: + self.logger.debug(message) + + def info(self, message): + if self.current_level.value <= LogLevel.INFO.value: + self.logger.info(message) + + def warn(self, message): + if self.current_level.value <= LogLevel.WARN.value: + self.logger.warning(message) + + def error(self, message): + if self.current_level.value <= LogLevel.ERROR.value: + self.logger.error(message) diff --git a/skyflow/utils/validations/__init__.py b/skyflow/utils/validations/__init__.py new file mode 100644 index 00000000..17bc49a7 --- /dev/null +++ b/skyflow/utils/validations/__init__.py @@ -0,0 +1,16 @@ +from ._validations import ( + validate_vault_config, + validate_insert_request, + validate_connection_config, + validate_update_vault_config, + validate_update_connection_config, + validate_credentials, + validate_log_level, + validate_delete_request, + validate_query_request, + validate_get_request, + validate_update_request, + validate_detokenize_request, + validate_tokenize_request, + validate_invoke_connection_params, +) \ No newline at end of file diff --git a/skyflow/utils/validations/_validations.py b/skyflow/utils/validations/_validations.py new file mode 100644 index 00000000..3e5c1823 --- /dev/null +++ b/skyflow/utils/validations/_validations.py @@ -0,0 +1,559 @@ +import json +import re +from 
skyflow.service_account import is_expired +from skyflow.utils.enums import LogLevel, Env, RedactionType, TokenMode +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.logger import log_info, log_error_log + +valid_vault_config_keys = ["vault_id", "cluster_id", "credentials", "env"] +valid_connection_config_keys = ["connection_id", "connection_url", "credentials"] +valid_credentials_keys = ["path", "roles", "context", "token", "credentials_string"] +invalid_input_error_code = SkyflowMessages.ErrorCodes.INVALID_INPUT.value + +def validate_required_field(logger, config, field_name, expected_type, empty_error, invalid_error): + field_value = config.get(field_name) + + if field_name not in config or not isinstance(field_value, expected_type): + if field_name == "vault_id": + logger.error(SkyflowMessages.ErrorLogs.VAULTID_IS_REQUIRED.value) + if field_name == "cluster_id": + logger.error(SkyflowMessages.ErrorLogs.CLUSTER_ID_IS_REQUIRED.value) + if field_name == "connection_id": + logger.error(SkyflowMessages.ErrorLogs.CONNECTION_ID_IS_REQUIRED.value) + if field_name == "connection_url": + logger.error(SkyflowMessages.ErrorLogs.INVALID_CONNECTION_URL.value) + raise SkyflowError(invalid_error, invalid_input_error_code) + + if isinstance(field_value, str) and not field_value.strip(): + if field_name == "vault_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_VAULTID.value) + if field_name == "cluster_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CLUSTER_ID.value) + if field_name == "connection_id": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CONNECTION_ID.value) + if field_name == "connection_url": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CONNECTION_URL.value) + if field_name == "path": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CREDENTIALS_PATH.value) + if field_name == "credentials_string": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_CREDENTIALS_STRING.value) + if field_name == "token": + 
logger.error(SkyflowMessages.ErrorLogs.EMPTY_TOKEN_VALUE.value) + if field_name == "api_key": + logger.error(SkyflowMessages.ErrorLogs.EMPTY_API_KEY_VALUE.value) + raise SkyflowError(empty_error, invalid_input_error_code) + +def validate_api_key(api_key: str, logger = None) -> bool: + if len(api_key) != 42: + log_error_log(SkyflowMessages.ErrorLogs.INVALID_API_KEY.value, logger = logger) + return False + api_key_pattern = re.compile(r'^sky-[a-zA-Z0-9]{5}-[a-fA-F0-9]{32}$') + + return bool(api_key_pattern.match(api_key)) + +def validate_credentials(logger, credentials, config_id_type=None, config_id=None): + key_present = [k for k in ["path", "token", "credentials_string", "api_key"] if credentials.get(k)] + + if len(key_present) == 0: + error_message = ( + SkyflowMessages.Error.INVALID_CREDENTIALS_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else + SkyflowMessages.Error.INVALID_CREDENTIALS.value + ) + raise SkyflowError(error_message, invalid_input_error_code) + elif len(key_present) > 1: + error_message = ( + SkyflowMessages.Error.MULTIPLE_CREDENTIALS_PASSED_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else + SkyflowMessages.Error.MULTIPLE_CREDENTIALS_PASSED.value + ) + raise SkyflowError(error_message, invalid_input_error_code) + + if "roles" in credentials: + validate_required_field( + logger, credentials, "roles", list, + SkyflowMessages.Error.INVALID_ROLES_KEY_TYPE_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_ROLES_KEY_TYPE.value, + SkyflowMessages.Error.EMPTY_ROLES_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_ROLES.value + ) + + if "context" in credentials: + validate_required_field( + logger, credentials, "context", str, + SkyflowMessages.Error.EMPTY_CONTEXT_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and 
config_id else SkyflowMessages.Error.EMPTY_CONTEXT.value, + SkyflowMessages.Error.INVALID_CONTEXT_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CONTEXT.value + ) + + if "credentials_string" in credentials: + validate_required_field( + logger, credentials, "credentials_string", str, + SkyflowMessages.Error.EMPTY_CREDENTIALS_STRING_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIALS_STRING.value, + SkyflowMessages.Error.INVALID_CREDENTIALS_STRING_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value + ) + elif "path" in credentials: + validate_required_field( + logger, credentials, "path", str, + SkyflowMessages.Error.EMPTY_CREDENTIAL_FILE_PATH_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIAL_FILE_PATH.value, + SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH_IN_CONFIG.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value + ) + elif "token" in credentials: + validate_required_field( + logger, credentials, "token", str, + SkyflowMessages.Error.EMPTY_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_CREDENTIALS_TOKEN.value, + SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value + ) + if is_expired(credentials.get("token"), logger): + raise SkyflowError( + SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_CREDENTIALS_TOKEN.value, + invalid_input_error_code + ) + elif 
"api_key" in credentials: + validate_required_field( + logger, credentials, "api_key", str, + SkyflowMessages.Error.EMPTY_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.EMPTY_API_KEY.value, + SkyflowMessages.Error.INVALID_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_API_KEY.value + ) + if not validate_api_key(credentials.get("api_key"), logger): + raise SkyflowError(SkyflowMessages.Error.INVALID_API_KEY.value.format(config_id_type, config_id) + if config_id_type and config_id else SkyflowMessages.Error.INVALID_API_KEY.value, + invalid_input_error_code) + +def validate_log_level(logger, log_level): + if not isinstance(log_level, LogLevel): + raise SkyflowError( SkyflowMessages.Error.INVALID_LOG_LEVEL.value, invalid_input_error_code) + + if log_level is None: + raise SkyflowError(SkyflowMessages.Error.EMPTY_LOG_LEVEL.value, invalid_input_error_code) + +def validate_keys(logger, config, config_keys): + for key in config.keys(): + if key not in config_keys: + raise SkyflowError(SkyflowMessages.Error.INVALID_KEY.value.format(key), invalid_input_error_code) + +def validate_vault_config(logger, config): + log_info(SkyflowMessages.Info.VALIDATING_VAULT_CONFIG.value, logger) + validate_keys(logger, config, valid_vault_config_keys) + + # Validate vault_id (string, not empty) + validate_required_field( + logger, config, "vault_id", str, + SkyflowMessages.Error.EMPTY_VAULT_ID.value, + SkyflowMessages.Error.INVALID_VAULT_ID.value + ) + vault_id = config.get("vault_id") + # Validate cluster_id (string, not empty) + validate_required_field( + logger, config, "cluster_id", str, + SkyflowMessages.Error.EMPTY_CLUSTER_ID.value.format(vault_id), + SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id) + ) + + # Validate credentials (dict, not empty) + if "credentials" in config and not config.get("credentials"): + raise 
SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("vault", vault_id), invalid_input_error_code) + + if "credentials" in config and config.get("credentials"): + validate_credentials(logger, config.get("credentials"), "vault", vault_id) + + # Validate env (optional, should be one of LogLevel values) + if "env" in config and config.get("env") not in Env: + logger.error(SkyflowMessages.ErrorLogs.VAULTID_IS_REQUIRED.value) + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + return True + +def validate_update_vault_config(logger, config): + + validate_keys(logger, config, valid_vault_config_keys) + + # Validate vault_id (string, not empty) + validate_required_field( + logger, config, "vault_id", str, + SkyflowMessages.Error.EMPTY_VAULT_ID.value, + SkyflowMessages.Error.INVALID_VAULT_ID.value + ) + + vault_id = config.get("vault_id") + + if "cluster_id" in config and not config.get("cluster_id"): + raise SkyflowError(SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(vault_id), invalid_input_error_code) + + if "env" in config and config.get("env") not in Env: + raise SkyflowError(SkyflowMessages.Error.INVALID_ENV.value.format(vault_id), invalid_input_error_code) + + if "credentials" not in config: + raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("vault", vault_id), invalid_input_error_code) + + validate_credentials(logger, config.get("credentials"), "vault", vault_id) + + return True + +def validate_connection_config(logger, config): + log_info(SkyflowMessages.Info.VALIDATING_CONNECTION_CONFIG.value, logger) + validate_keys(logger, config, valid_connection_config_keys) + + validate_required_field( + logger, config, "connection_id" , str, + SkyflowMessages.Error.EMPTY_CONNECTION_ID.value, + SkyflowMessages.Error.INVALID_CONNECTION_ID.value + ) + + connection_id = config.get("connection_id") + + validate_required_field( + logger, config, "connection_url", str, + 
SkyflowMessages.Error.EMPTY_CONNECTION_URL.value.format(connection_id),
+        SkyflowMessages.Error.INVALID_CONNECTION_URL.value.format(connection_id)
+    )
+
+    if "credentials" not in config:
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("connection", connection_id), invalid_input_error_code)
+
+    validate_credentials(logger, config.get("credentials"), "connection", connection_id)
+
+    return True
+
+def validate_update_connection_config(logger, config):
+
+    validate_keys(logger, config, valid_connection_config_keys)
+
+    validate_required_field(
+        logger, config, "connection_id", str,
+        SkyflowMessages.Error.EMPTY_CONNECTION_ID.value,
+        SkyflowMessages.Error.INVALID_CONNECTION_ID.value
+    )
+
+    connection_id = config.get("connection_id")
+
+    validate_required_field(
+        logger, config, "connection_url", str,
+        SkyflowMessages.Error.EMPTY_CONNECTION_URL.value.format(connection_id),
+        SkyflowMessages.Error.INVALID_CONNECTION_URL.value.format(connection_id)
+    )
+
+    if "credentials" not in config:
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_CREDENTIALS.value.format("connection", connection_id), invalid_input_error_code)
+    # Pass the config type and id so credential error messages identify the
+    # connection, matching validate_connection_config and the vault validators.
+    validate_credentials(logger, config.get("credentials"), "connection", connection_id)
+
+    return True
+
+
+def validate_insert_request(logger, request):
+    if not isinstance(request.table_name, str):
+        log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_NAME_IN_INSERT.value, invalid_input_error_code)
+    if not request.table_name.strip():
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.MISSING_TABLE_NAME_IN_INSERT.value, invalid_input_error_code)
+
+    if not isinstance(request.values, list) or not all(isinstance(v, dict) for v in request.values):
+        log_error_log(SkyflowMessages.ErrorLogs.VALUES_IS_REQUIRED.value.format("INSERT"), logger = logger)
+        raise
SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code)
+
+    if not len(request.values):
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_VALUES.value.format("INSERT"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_DATA_IN_INSERT.value, invalid_input_error_code)
+
+    for i, item in enumerate(request.values, start=1):
+        for key, value in item.items():
+            if key is None or key == "":
+                log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_KEY_IN_VALUES.value.format("INSERT"), logger = logger)
+
+            if value is None or value == "":
+                log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_VALUE_IN_VALUES.value.format("INSERT", key), logger = logger)
+
+    if request.upsert is not None and (not isinstance(request.upsert, str) or not request.upsert.strip()):
+        # .value is a plain string: it must be formatted, not called
+        # (the previous .value("INSERT") raised TypeError).
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_UPSERT.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_UPSERT_OPTIONS_TYPE.value, invalid_input_error_code)
+
+    if request.homogeneous is not None and not isinstance(request.homogeneous, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_HOMOGENEOUS_TYPE.value, invalid_input_error_code)
+
+    if request.upsert and request.homogeneous:
+        log_error_log(SkyflowMessages.ErrorLogs.HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT.value.format("INSERT"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.HOMOGENOUS_NOT_SUPPORTED_WITH_UPSERT.value.format("INSERT"), invalid_input_error_code)
+
+    if request.token_mode is not None:
+        if not isinstance(request.token_mode, TokenMode):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_MODE_TYPE.value, invalid_input_error_code)
+
+    if not isinstance(request.return_tokens, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code)
+
+    if not isinstance(request.continue_on_error, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value,
invalid_input_error_code)
+
+    if request.tokens:
+        # Validate the container shape first; iterating a non-list entry (or a
+        # list of non-dicts) below would raise an unrelated TypeError before
+        # this check ever ran.
+        if not isinstance(request.tokens, list) or not request.tokens or not all(
+                isinstance(t, dict) for t in request.tokens):
+            log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("INSERT"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code)
+        for i, item in enumerate(request.tokens, start=1):
+            for key, value in item.items():
+                # Log the KEY message for empty keys and the VALUE message for
+                # empty values (previously swapped), mirroring the values loop.
+                if key is None or key == "":
+                    log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_KEY_IN_TOKENS.value.format("INSERT"),
+                                  logger=logger)
+
+                if value is None or value == "":
+                    log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_VALUE_IN_TOKENS.value.format("INSERT", key),
+                                  logger=logger)
+
+    if request.token_mode == TokenMode.ENABLE and not request.tokens:
+        raise SkyflowError(SkyflowMessages.Error.NO_TOKENS_IN_INSERT.value.format(request.token_mode), invalid_input_error_code)
+
+    if request.token_mode == TokenMode.DISABLE and request.tokens:
+        raise SkyflowError(SkyflowMessages.Error.TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE.value, invalid_input_error_code)
+
+    if request.token_mode == TokenMode.ENABLE_STRICT:
+        if len(request.values) != len(request.tokens):
+            log_error_log(SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("INSERT"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, invalid_input_error_code)
+
+        for v, t in zip(request.values, request.tokens):
+            if set(v.keys()) != set(t.keys()):
+                log_error_log(SkyflowMessages.ErrorLogs.MISMATCH_OF_FIELDS_AND_TOKENS.value.format("INSERT"), logger=logger)
+                raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, invalid_input_error_code)
+
+def validate_delete_request(logger, request):
+    if not isinstance(request.table, str):
+
log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("DELETE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code) + if not request.table.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("DELETE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code) + + if not request.ids: + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_IDS.value.format("DELETE"), logger=logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_RECORD_IDS_IN_DELETE.value, invalid_input_error_code) + +def validate_query_request(logger, request): + if not request.query: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_IS_REQUIRED.value.format("QUERY"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_QUERY.value, invalid_input_error_code) + + if not isinstance(request.query, str): + query_type = str(type(request.query)) + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_TYPE.value.format(query_type), invalid_input_error_code) + + if not request.query.strip(): + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_QUERY.value.format("QUERY"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_QUERY.value, invalid_input_error_code) + + if not request.query.upper().startswith("SELECT"): + command = request.query + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_COMMAND.value.format(command), invalid_input_error_code) + +def validate_get_request(logger, request): + redaction_type = request.redaction_type + column_name = request.column_name + column_values = request.column_values + skyflow_ids = request.ids + fields = request.fields + offset = request.offset + limit = request.limit + download_url = request.download_url + + if not isinstance(request.table, str): + log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("GET"), logger=logger) + raise 
SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code)
+    if not request.table.strip():
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("GET"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code)
+
+    if not skyflow_ids and not column_name and not column_values:
+        log_error_log(SkyflowMessages.ErrorLogs.NEITHER_IDS_NOR_COLUMN_NAME_PASSED.value.format("GET"), logger = logger)
+
+    if skyflow_ids and (not isinstance(skyflow_ids, list) or not skyflow_ids):
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_IDS.value.format("GET"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_IDS_TYPE.value.format(type(skyflow_ids)), invalid_input_error_code)
+
+    if skyflow_ids:
+        for index, skyflow_id in enumerate(skyflow_ids):
+            if skyflow_id is None or skyflow_id == "":
+                log_error_log(SkyflowMessages.ErrorLogs.EMPTY_OR_NULL_ID_IN_IDS.value.format("GET", index),
+                              logger=logger)
+
+    if not isinstance(request.return_tokens, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code)
+
+    if redaction_type is not None and not isinstance(redaction_type, RedactionType):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(redaction_type)), invalid_input_error_code)
+
+    if fields is not None and (not isinstance(fields, list) or not fields):
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_FIELDS.value.format("GET"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_FIELDS_VALUE.value.format(type(fields)), invalid_input_error_code)
+
+    if offset is not None and limit is not None:
+        raise SkyflowError(
+            SkyflowMessages.Error.BOTH_OFFSET_AND_LIMIT_SPECIFIED.value,
+            invalid_input_error_code)
+
+    # .value is a plain string on these messages: use .format(...) rather than
+    # calling it (the previous .value(type(...)) raised TypeError).
+    if offset is not None and not isinstance(offset, str):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_OFF_SET_VALUE.value.format(type(offset)), invalid_input_error_code)
+
+    if limit is not None and not isinstance(limit, str):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_LIMIT_VALUE.value.format(type(limit)), invalid_input_error_code)
+
+    if download_url is not None and not isinstance(download_url, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_DOWNLOAD_URL_VALUE.value.format(type(download_url)), invalid_input_error_code)
+
+    if column_name is not None and (not isinstance(column_name, str) or not column_name.strip()):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code)
+
+    if column_values is not None and (
+            not isinstance(column_values, list) or not column_values or not all(
+            isinstance(val, str) for val in column_values)):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_VALUE.value.format(type(column_values)), invalid_input_error_code)
+
+    if request.return_tokens and redaction_type:
+        log_error_log(SkyflowMessages.ErrorLogs.TOKENIZATION_NOT_SUPPORTED_WITH_REDACTION.value.format("GET"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.REDACTION_WITH_TOKENS_NOT_SUPPORTED.value, invalid_input_error_code)
+
+    if (column_name or column_values) and request.return_tokens:
+        log_error_log(SkyflowMessages.ErrorLogs.TOKENIZATION_SUPPORTED_ONLY_WITH_IDS.value.format("GET"),
+                      logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.TOKENS_GET_COLUMN_NOT_SUPPORTED.value, invalid_input_error_code)
+
+    if column_values and not column_name:
+        log_error_log(SkyflowMessages.ErrorLogs.COLUMN_VALUES_IS_REQUIRED_GET.value.format("GET"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_VALUE.value.format(type(column_values)), invalid_input_error_code)
+
+    if column_name and not column_values:
+        log_error_log(SkyflowMessages.ErrorLogs.COLUMN_NAME_IS_REQUIRED.value.format("GET"), logger = logger)
+        # 'raise' was missing here, so the error object was constructed and
+        # silently discarded and the invalid request passed validation.
+        raise SkyflowError(SkyflowMessages.Error.INVALID_COLUMN_NAME.value.format(type(column_name)), invalid_input_error_code)
+
+    if (column_name or
column_values) and skyflow_ids:
+        log_error_log(SkyflowMessages.ErrorLogs.BOTH_IDS_AND_COLUMN_NAME_PASSED.value.format("GET"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED.value, invalid_input_error_code)
+
+def validate_update_request(logger, request):
+    # Validate container types before reading request.data, so a non-dict
+    # payload surfaces as a validation error instead of an AttributeError
+    # from the comprehension below.
+    if not isinstance(request.table, str):
+        log_error_log(SkyflowMessages.ErrorLogs.TABLE_IS_REQUIRED.value.format("UPDATE"), logger=logger)
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TABLE_VALUE.value, invalid_input_error_code)
+    if not request.table.strip():
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TABLE_NAME.value.format("UPDATE"), logger = logger)
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_TABLE_VALUE.value, invalid_input_error_code)
+
+    if not isinstance(request.return_tokens, bool):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_RETURN_TOKENS_TYPE.value, invalid_input_error_code)
+
+    if not isinstance(request.data, dict):
+        # .value is a plain string; format it instead of calling it
+        raise SkyflowError(SkyflowMessages.Error.INVALID_FIELDS_TYPE.value.format(type(request.data)), invalid_input_error_code)
+
+    if not len(request.data.items()):
+        raise SkyflowError(SkyflowMessages.Error.UPDATE_FIELD_KEY_ERROR.value, invalid_input_error_code)
+
+    field = {key: value for key, value in request.data.items() if key != "skyflow_id"}
+
+    # dict.get returns None when the key is absent; guard before .strip()
+    # (the previous code crashed with AttributeError on a missing skyflow_id).
+    skyflow_id = request.data.get("skyflow_id")
+    if skyflow_id is None:
+        log_error_log(SkyflowMessages.ErrorLogs.SKYFLOW_ID_IS_REQUIRED.value.format("UPDATE"), logger=logger)
+    elif not skyflow_id.strip():
+        log_error_log(SkyflowMessages.ErrorLogs.EMPTY_SKYFLOW_ID.value.format("UPDATE"), logger = logger)
+
+    if request.token_mode is not None:
+        if not isinstance(request.token_mode, TokenMode):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TOKEN_MODE_TYPE.value, invalid_input_error_code)
+
+    if request.tokens:
+        if not isinstance(request.tokens, dict) or not request.tokens:
+            log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("UPDATE"),
logger=logger) + raise SkyflowError(SkyflowMessages.Error.INVALID_TYPE_OF_DATA_IN_INSERT.value, invalid_input_error_code) + + if request.token_mode == TokenMode.ENABLE and not request.tokens: + raise SkyflowError(SkyflowMessages.Error.NO_TOKENS_IN_INSERT.value.format(request.token_mode), + invalid_input_error_code) + + if request.token_mode == TokenMode.DISABLE and request.tokens: + raise SkyflowError(SkyflowMessages.Error.TOKENS_PASSED_FOR_TOKEN_MODE_DISABLE.value, invalid_input_error_code) + + if request.token_mode == TokenMode.ENABLE_STRICT: + if len(field) != len(request.tokens): + log_error_log( + SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("UPDATE"), + logger=logger) + raise SkyflowError(SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, + invalid_input_error_code) + + if set(field.keys()) != set(request.tokens.keys()): + log_error_log( + SkyflowMessages.ErrorLogs.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value.format("UPDATE"), + logger=logger) + raise SkyflowError( + SkyflowMessages.Error.INSUFFICIENT_TOKENS_PASSED_FOR_TOKEN_MODE_ENABLE_STRICT.value, + invalid_input_error_code) + +def validate_detokenize_request(logger, request): + if not isinstance(request.redaction_type, RedactionType): + raise SkyflowError(SkyflowMessages.Error.INVALID_REDACTION_TYPE.value.format(type(request.redaction_type)), invalid_input_error_code) + + if not isinstance(request.continue_on_error, bool): + raise SkyflowError(SkyflowMessages.Error.INVALID_CONTINUE_ON_ERROR_TYPE.value, invalid_input_error_code) + + if not len(request.tokens): + log_error_log(SkyflowMessages.ErrorLogs.TOKENS_REQUIRED.value.format("DETOKENIZE"), logger = logger) + log_error_log(SkyflowMessages.ErrorLogs.EMPTY_TOKENS.value.format("DETOKENIZE"), logger = logger) + raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENS_LIST_VALUE.value, invalid_input_error_code) + + if not isinstance(request.tokens, list): + raise 
SkyflowError(SkyflowMessages.Error.INVALID_TOKENS_LIST_VALUE.value.format(type(request.tokens)), invalid_input_error_code)
+
+def validate_tokenize_request(logger, request):
+    parameters = request.tokenize_parameters
+    if not isinstance(parameters, list):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETERS.value.format(type(parameters)), invalid_input_error_code)
+
+    if not len(parameters):
+        raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETERS.value, invalid_input_error_code)
+
+    for i, param in enumerate(parameters):
+        if not isinstance(param, dict):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETER.value.format(i, type(param)), invalid_input_error_code)
+
+        allowed_keys = {"value", "column_group"}
+
+        if set(param.keys()) != allowed_keys:
+            raise SkyflowError(SkyflowMessages.Error.INVALID_TOKENIZE_PARAMETER_KEY.value.format(i), invalid_input_error_code)
+
+        if not param.get("value"):
+            log_error_log(SkyflowMessages.ErrorLogs.COLUMN_VALUES_IS_REQUIRED_TOKENIZE.value.format("TOKENIZE"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETER_VALUE.value.format(i), invalid_input_error_code)
+        if not param.get("column_group"):
+            log_error_log(SkyflowMessages.ErrorLogs.EMPTY_COLUMN_GROUP_IN_COLUMN_VALUES.value.format("TOKENIZE"), logger = logger)
+            raise SkyflowError(SkyflowMessages.Error.EMPTY_TOKENIZE_PARAMETER_COLUMN_GROUP.value.format(i), invalid_input_error_code)
+
+def validate_invoke_connection_params(logger, query_params, path_params):
+    if not isinstance(path_params, dict):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_PATH_PARAMS.value, invalid_input_error_code)
+
+    if not isinstance(query_params, dict):
+        raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code)
+
+    for param, value in path_params.items():
+        if not(isinstance(param, str) and isinstance(value, str)):
+            raise SkyflowError(SkyflowMessages.Error.INVALID_PATH_PARAMS.value,
invalid_input_error_code) + + for param, value in query_params.items(): + if not isinstance(param, str): + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code) + + try: + json.dumps(query_params) + except TypeError: + raise SkyflowError(SkyflowMessages.Error.INVALID_QUERY_PARAMS.value, invalid_input_error_code) diff --git a/skyflow/vault/__init__.py b/skyflow/vault/__init__.py index 7b6868ef..e69de29b 100644 --- a/skyflow/vault/__init__.py +++ b/skyflow/vault/__init__.py @@ -1,5 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from ._client import Client -from ._config import * \ No newline at end of file diff --git a/skyflow/vault/_client.py b/skyflow/vault/_client.py deleted file mode 100644 index e426f59f..00000000 --- a/skyflow/vault/_client.py +++ /dev/null @@ -1,283 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -import types -import requests -import asyncio -from skyflow.vault._insert import getInsertRequestBody, processResponse, convertResponse -from skyflow.vault._update import sendUpdateRequests, createUpdateResponseBody -from skyflow.vault._config import Configuration, ConnectionConfig, DeleteOptions, DetokenizeOptions, GetOptions, InsertOptions, UpdateOptions, QueryOptions -from skyflow.vault._connection import createRequest -from skyflow.vault._detokenize import sendDetokenizeRequests, createDetokenizeResponseBody -from skyflow.vault._get_by_id import sendGetByIdRequests, createGetResponseBody -from skyflow.vault._get import sendGetRequests -from skyflow.vault._delete import deleteProcessResponse -from skyflow.vault._query import getQueryRequestBody, getQueryResponse -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import log_info, log_error, InfoMessages, InterfaceName, getMetrics -from skyflow.vault._token import tokenProviderWrapper - -class Client: - def __init__(self, config: Configuration): - - interface = 
InterfaceName.CLIENT.value - - log_info(InfoMessages.INITIALIZE_CLIENT.value, interface=interface) - - if not isinstance(config.vaultID, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.VAULT_ID_INVALID_TYPE.value % ( - str(type(config.vaultID))), interface=interface) - if not isinstance(config.vaultURL, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.VAULT_URL_INVALID_TYPE.value % ( - str(type(config.vaultURL))), interface=interface) - - if not isinstance(config.tokenProvider, types.FunctionType): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.TOKEN_PROVIDER_ERROR.value % ( - str(type(config.tokenProvider))), interface=interface) - - self.vaultID = config.vaultID - self.vaultURL = config.vaultURL.rstrip('/') - self.tokenProvider = config.tokenProvider - self.storedToken = '' - log_info(InfoMessages.CLIENT_INITIALIZED.value, interface=interface) - - def insert(self, records: dict, options: InsertOptions = InsertOptions()): - interface = InterfaceName.INSERT.value - log_info(InfoMessages.INSERT_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - jsonBody = getInsertRequestBody(records, options) - requestURL = self._get_complete_vault_url() - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - - response = requests.post(requestURL, data=jsonBody, headers=headers) - processedResponse = processResponse(response) - result, partial = convertResponse(records, processedResponse, options) - if partial: - log_error(SkyflowErrorMessages.BATCH_INSERT_PARTIAL_SUCCESS.value, interface) - elif 'records' not in result: - log_error(SkyflowErrorMessages.BATCH_INSERT_FAILURE.value, interface) - else: - log_info(InfoMessages.INSERT_DATA_SUCCESS.value, interface) - return result - - def detokenize(self, records: dict, 
options: DetokenizeOptions = DetokenizeOptions()): - interface = InterfaceName.DETOKENIZE.value - log_info(InfoMessages.DETOKENIZE_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() + '/detokenize' - responses = asyncio.run(sendDetokenizeRequests( - records, url, self.storedToken, options)) - result, partial = createDetokenizeResponseBody(records, responses, options) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - elif 'records' not in result: - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, SkyflowErrorMessages.SERVER_ERROR, result, interface=interface) - else: - log_info(InfoMessages.DETOKENIZE_SUCCESS.value, interface) - return result - - def get(self, records, options: GetOptions = GetOptions()): - interface = InterfaceName.GET.value - log_info(InfoMessages.GET_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendGetRequests( - records, options, url, self.storedToken)) - result, partial = createGetResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.GET_SUCCESS.value, interface) - - return result - - def get_by_id(self, records): - interface = InterfaceName.GET_BY_ID.value - log_info(InfoMessages.GET_BY_ID_TRIGGERED.value, interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendGetByIdRequests( - records, url, self.storedToken)) - result, partial = 
createGetResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.GET_BY_ID_SUCCESS.value, interface) - - return result - - def invoke_connection(self, config: ConnectionConfig): - - interface = InterfaceName.INVOKE_CONNECTION.value - log_info(InfoMessages.INVOKE_CONNECTION_TRIGGERED.value, interface) - - session = requests.Session() - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - request = createRequest(config) - - if not 'X-Skyflow-Authorization'.lower() in request.headers: - request.headers['x-skyflow-authorization'] = self.storedToken - - request.headers['sky-metadata'] = json.dumps(getMetrics()) - - response = session.send(request) - session.close() - return processResponse(response, interface=interface) - - def query(self, queryInput, options: QueryOptions = QueryOptions()): - interface = InterfaceName.QUERY.value - log_info(InfoMessages.QUERY_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - jsonBody = getQueryRequestBody(queryInput, options) - requestURL = self._get_complete_vault_url() + "/query" - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Content-Type": "application/json", - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - - response = requests.post(requestURL, data=jsonBody, headers=headers) - result = getQueryResponse(response) - - log_info(InfoMessages.QUERY_SUCCESS.value, interface) - return result - - def _checkConfig(self, interface): - ''' - Performs basic check on the given client config - ''' - if not len(self.vaultID) > 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_VAULT_ID, interface=interface) - if not len(self.vaultURL) > 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - 
SkyflowErrorMessages.EMPTY_VAULT_URL, interface=interface) - - def _get_complete_vault_url(self): - ''' - Get the complete vault url from given vault url and vault id - ''' - return self.vaultURL + "/v1/vaults/" + self.vaultID - - def update(self, updateInput, options: UpdateOptions = UpdateOptions()): - interface = InterfaceName.UPDATE.value - log_info(InfoMessages.UPDATE_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - url = self._get_complete_vault_url() - responses = asyncio.run(sendUpdateRequests( - updateInput, options, url, self.storedToken)) - result, partial = createUpdateResponseBody(responses) - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - else: - log_info(InfoMessages.UPDATE_DATA_SUCCESS.value, interface) - return result - - def delete(self, records: dict, options: DeleteOptions = DeleteOptions()): - interface = InterfaceName.DELETE.value - log_info(InfoMessages.DELETE_TRIGGERED.value, interface=interface) - - self._checkConfig(interface) - - self.storedToken = tokenProviderWrapper( - self.storedToken, self.tokenProvider, interface) - headers = { - "Authorization": "Bearer " + self.storedToken, - "sky-metadata": json.dumps(getMetrics()) - } - error_list = [] - result_list = [] - errors = {} - result = {} - try: - record = records["records"] - if not isinstance(record, list): - recordsType = str(type(record)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - if len(record) == 0: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_RECORDS_IN_DELETE, interface=interface) - - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - try: - for 
record in records["records"]: - id = record["id"] - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_ID_TYPE.value % (idType), interface=interface) - if id == "": - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_ID_IN_DELETE, interface=interface) - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - try: - for record in records["records"]: - table = record["table"] - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - if table == "": - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.EMPTY_TABLE_IN_DELETE, interface=interface) - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - - partial=None - - for record in records["records"]: - request_url = self._get_complete_vault_url() + "/" + record["table"] + "/" + record["id"] - response = requests.delete(request_url, headers=headers) - partial,processed_response = deleteProcessResponse(response, records) - if processed_response is not None and processed_response.get('code') == 404: - errors.update({'id': record["id"], 'error': processed_response}) - error_list.append(errors) - else: - result_list.append(processed_response) - if result_list: - result.update({'records': result_list}) - if errors: - result.update({'errors': error_list}) - - if partial: - raise SkyflowError(SkyflowErrorCodes.PARTIAL_SUCCESS, - SkyflowErrorMessages.PARTIAL_SUCCESS, result, interface=interface) - - else: - log_info(InfoMessages.DELETE_DATA_SUCCESS.value, interface) - return result diff --git a/skyflow/vault/_config.py b/skyflow/vault/_config.py deleted file mode 100644 index 
796c1824..00000000 --- a/skyflow/vault/_config.py +++ /dev/null @@ -1,89 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from enum import Enum -from types import FunctionType -from typing import List - - -class Configuration: - - def __init__(self, vaultID: str = None, vaultURL: str = None, tokenProvider: FunctionType = None): - - self.vaultID = '' - self.vaultURL = '' - - if tokenProvider == None and vaultURL == None and isinstance(vaultID, FunctionType): - self.tokenProvider = vaultID - elif tokenProvider == None and vaultID == None and isinstance(vaultURL, FunctionType): - self.tokenProvider = vaultURL - else: - if tokenProvider is None: - raise TypeError('tokenProvider must be given') - self.vaultID = vaultID or "" - self.vaultURL = vaultURL or "" - self.tokenProvider = tokenProvider - -class BYOT(Enum): - DISABLE = "DISABLE" - ENABLE = "ENABLE" - ENABLE_STRICT = "ENABLE_STRICT" - -class UpsertOption: - def __init__(self, table: str, column: str): - self.table = table - self.column = column - - -class InsertOptions: - def __init__(self, tokens: bool=True, upsert :List[UpsertOption]=None, continueOnError:bool=None, byot:BYOT=BYOT.DISABLE): - self.tokens = tokens - self.upsert = upsert - self.continueOnError = continueOnError - self.byot = byot - - -class UpdateOptions: - def __init__(self, tokens: bool = True): - self.tokens = tokens - -class GetOptions: - def __init__(self, tokens: bool = False): - self.tokens = tokens - -class DeleteOptions: - def __init__(self, tokens: bool=False): - self.tokens = tokens - -class QueryOptions: - def __init__(self): - pass - -class DetokenizeOptions: - def __init__(self, continueOnError: bool=True): - self.continueOnError = continueOnError - -class RequestMethod(Enum): - GET = 'GET' - POST = 'POST' - PUT = 'PUT' - PATCH = 'PATCH' - DELETE = 'DELETE' - - -class ConnectionConfig: - def __init__(self, connectionURL: str, methodName: RequestMethod, - pathParams: dict = {}, queryParams: dict = {}, requestHeader: dict = {}, 
requestBody: dict = {}): - self.connectionURL = connectionURL.rstrip("/") - self.methodName = methodName - self.pathParams = pathParams - self.queryParams = queryParams - self.requestHeader = requestHeader - self.requestBody = requestBody - - -class RedactionType(Enum): - PLAIN_TEXT = "PLAIN_TEXT" - MASKED = "MASKED" - REDACTED = "REDACTED" - DEFAULT = "DEFAULT" diff --git a/skyflow/vault/_connection.py b/skyflow/vault/_connection.py deleted file mode 100644 index 86f75c08..00000000 --- a/skyflow/vault/_connection.py +++ /dev/null @@ -1,116 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from requests.sessions import PreparedRequest -from ._config import ConnectionConfig -from skyflow.errors._skyflow_errors import * -import requests -import json - -from skyflow._utils import InterfaceName, http_build_query, supported_content_types, r_urlencode - -interface = InterfaceName.INVOKE_CONNECTION.value - - -def createRequest(config: ConnectionConfig) -> PreparedRequest: - url = parsePathParams(config.connectionURL.rstrip('/'), config.pathParams) - - try: - if isinstance(config.requestHeader, dict): - header = to_lowercase_keys(json.loads( - json.dumps(config.requestHeader))) - else: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_REQUEST_BODY, interface=interface) - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_HEADERS, interface=interface) - if not 'Content-Type'.lower() in header: - header['content-type'] = supported_content_types["JSON"] - - try: - if isinstance(config.requestBody, dict): - json_data, files = get_data_from_content_type( - config.requestBody, header["content-type"]) - else: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_RESPONSE_BODY, interface=interface) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_REQUEST_BODY, interface=interface) - - 
verifyParams(config.queryParams, config.pathParams) - - try: - return requests.Request( - method=config.methodName.value, - url=url, - data=json_data, - headers=header, - params=config.queryParams, - files=files - ).prepare() - except requests.exceptions.InvalidURL: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_URL.value % ( - config.connectionURL), interface=interface) - - -def parsePathParams(url, pathParams): - result = url - for param, value in pathParams.items(): - result = result.replace('{' + param + '}', value) - - return result - - -def verifyParams(queryParams, pathParams): - if not isinstance(pathParams, dict): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_PATH_PARAMS, interface=interface) - if not isinstance(queryParams, dict): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_QUERY_PARAMS, interface=interface) - - for param, value in pathParams.items(): - if not(isinstance(param, str) and isinstance(value, str)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_PATH_PARAM_TYPE.value % ( - str(type(param)), str(type(value))), interface=interface) - - for param, value in queryParams.items(): - if not isinstance(param, str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_QUERY_PARAM_TYPE.value % ( - str(type(param)), str(type(value))), interface=interface) - - try: - json.dumps(queryParams) - except TypeError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_QUERY_PARAMS, interface=interface) - - -def to_lowercase_keys(dict): - ''' - convert keys of dictionary to lowercase - ''' - result = {} - for key, value in dict.items(): - result[key.lower()] = value - - return result - - -def get_data_from_content_type(data, content_type): - ''' - Get request data according to content type - ''' - converted_data = data - files = {} - if content_type == 
supported_content_types["URLENCODED"]: - converted_data = http_build_query(data) - elif content_type == supported_content_types["FORMDATA"]: - converted_data = r_urlencode(list(), dict(), data) - files = {(None, None)} - elif content_type == supported_content_types["JSON"]: - converted_data = json.dumps(data) - - return converted_data, files diff --git a/skyflow/vault/_delete.py b/skyflow/vault/_delete.py deleted file mode 100644 index 9faa8820..00000000 --- a/skyflow/vault/_delete.py +++ /dev/null @@ -1,43 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json - -import requests -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName - -interface = InterfaceName.DELETE.value - - -def deleteProcessResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content - partial = False - try: - response.raise_for_status() - if statusCode == 204: - return None - try: - return partial,json.loads(content) - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response is not None and response.content is not None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == dict and 'message' in errorResponse[ - 'error']: - message = errorResponse['error']['message'] - partial=True - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - error = {} - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - error.update({"code": statusCode, "description": message}) - return partial,error - diff --git a/skyflow/vault/_detokenize.py b/skyflow/vault/_detokenize.py deleted file mode 100644 index 9e19c3f2..00000000 --- 
a/skyflow/vault/_detokenize.py +++ /dev/null @@ -1,134 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession, request -import json -from ._config import RedactionType -from skyflow._utils import InterfaceName, getMetrics -from skyflow.vault._config import DetokenizeOptions - -interface = InterfaceName.DETOKENIZE.value - - -def getDetokenizeRequestBody(data): - try: - token = data["token"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TOKEN_KEY_ERROR, interface=interface) - if not isinstance(token, str): - tokenType = str(type(token)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TOKEN_TYPE.value % ( - tokenType), interface=interface) - - if "redaction" in data: - if not isinstance(data["redaction"], RedactionType): - redactionType = str(type(data["redaction"])) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - else: - redactionType = data["redaction"] - else: - redactionType = RedactionType.PLAIN_TEXT - - requestBody = {"detokenizationParameters": []} - requestBody["detokenizationParameters"].append({ - "token": token, - "redaction": redactionType.value - }) - return requestBody - -def getBulkDetokenizeRequestBody(records): - bulkRequestBody = {"detokenizationParameters": []} - for record in records: - requestBody = getDetokenizeRequestBody(record) - bulkRequestBody["detokenizationParameters"].append(requestBody["detokenizationParameters"][0]) - return bulkRequestBody - -async def sendDetokenizeRequests(data, url, token, options: DetokenizeOptions): - - tasks = [] - - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - 
if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - if not options.continueOnError: - requestBody = getBulkDetokenizeRequestBody(records) - jsonBody = json.dumps(requestBody) - validatedRecords.append(jsonBody) - else: - for record in records: - requestBody = getDetokenizeRequestBody(record) - jsonBody = json.dumps(requestBody) - validatedRecords.append(jsonBody) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - - } - task = asyncio.ensure_future(post(url, record, headers, session)) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - - -async def post(url, data, headers, session): - async with session.post(url, data=data, headers=headers, ssl=False) as response: - try: - return (await response.read(), response.status, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status) - - -def createDetokenizeResponseBody(records, responses, options: DetokenizeOptions): - result = { - "records": [], - "errors": [] - } - partial = False - for index, response in enumerate(responses): - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - for record in jsonRes["records"]: - temp = {} - temp["token"] = record["token"] - temp["value"] = record["value"] - result["records"].append(temp) - else: - temp = {"error": {}} - - if options.continueOnError: - temp["token"] = records["records"][index]["token"] - - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = 
jsonRes["error"]["message"] - if len(r) > 2 and r[2] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[2]) - result["errors"].append(temp) - partial = True - if len(result["records"]) == 0: - partial = False - result.pop("records") - elif len(result["errors"]) == 0: - result.pop("errors") - return result, partial diff --git a/skyflow/vault/_get.py b/skyflow/vault/_get.py deleted file mode 100644 index f00ed2e4..00000000 --- a/skyflow/vault/_get.py +++ /dev/null @@ -1,127 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession -from skyflow.vault._config import RedactionType, GetOptions -from skyflow._utils import InterfaceName, getMetrics -from skyflow.vault._get_by_id import get - -interface = InterfaceName.GET.value - -def getGetRequestBody(data, options: GetOptions): - requestBody = {} - ids = None - if "ids" in data: - ids = data["ids"] - if not isinstance(ids, list): - idsType = str(type(ids)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_IDS_TYPE.value % (idsType), interface=interface) - for id in ids: - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_ID_TYPE.value % ( - idType), interface=interface) - requestBody["skyflow_ids"] = ids - try: - table = data["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - else: - requestBody["tableName"] = table - - if options.tokens: - if data.get("redaction"): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - 
SkyflowErrorMessages.REDACTION_WITH_TOKENS_NOT_SUPPORTED, interface=interface) - if (data.get('columnName') or data.get('columnValues')): - raise SkyflowError(SkyflowErrorCodes.TOKENS_GET_COLUMN_NOT_SUPPORTED, - SkyflowErrorMessages.TOKENS_GET_COLUMN_NOT_SUPPORTED, interface=interface) - requestBody["tokenization"] = options.tokens - else: - try: - redaction = data["redaction"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.REDACTION_KEY_ERROR, interface=interface) - if not isinstance(redaction, RedactionType): - redactionType = str(type(redaction)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - else: - requestBody["redaction"] = redaction.value - - columnName = None - if "columnName" in data: - columnName = data["columnName"] - if not isinstance(columnName, str): - columnNameType = str(type(columnName)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_COLUMN_NAME.value % ( - columnNameType), interface=interface) - - columnValues = None - if columnName is not None and "columnValues" in data: - columnValues = data["columnValues"] - if not isinstance(columnValues, list): - columnValuesType = str(type(columnValues)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_COLUMN_VALUE.value % ( - columnValuesType), interface=interface) - else: - requestBody["column_name"] = columnName - requestBody["column_values"] = columnValues - - if (ids is None and (columnName is None or columnValues is None)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR, interface=interface) - elif (ids != None and (columnName != None or columnValues != None)): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED, interface=interface) - return requestBody - -async 
def sendGetRequests(data, options: GetOptions, url, token): - tasks = [] - try: - records = data["records"] - except KeyError: - raise SkyflowError( - SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, - interface=interface - ) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError( - SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % recordsType, - interface=interface - ) - - validatedRecords = [] - for record in records: - requestBody = getGetRequestBody(record, options) - validatedRecords.append(requestBody) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - table = record.pop("tableName") - params = record - if options.tokens: - params["tokenization"] = json.dumps(record["tokenization"]) - task = asyncio.ensure_future( - get(url, headers, params, session, table) - ) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks \ No newline at end of file diff --git a/skyflow/vault/_get_by_id.py b/skyflow/vault/_get_by_id.py deleted file mode 100644 index d4ad6a04..00000000 --- a/skyflow/vault/_get_by_id.py +++ /dev/null @@ -1,116 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -import asyncio -from aiohttp import ClientSession -import json -from ._config import RedactionType -from skyflow._utils import InterfaceName, getMetrics - -interface = InterfaceName.GET_BY_ID.value - -def getGetByIdRequestBody(data): - try: - ids = data["ids"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - if not isinstance(ids, list): - idsType = str(type(ids)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_IDS_TYPE.value % (idsType), interface=interface) - for id in ids: - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_ID_TYPE.value % ( - idType), interface=interface) - try: - table = data["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - try: - redaction = data["redaction"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.REDACTION_KEY_ERROR, interface=interface) - if not isinstance(redaction, RedactionType): - redactionType = str(type(redaction)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % ( - redactionType), interface=interface) - return ids, table, redaction.value - - -async def sendGetByIdRequests(data, url, token): - tasks = [] - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - if not isinstance(records, list): - recordsType = 
str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - for record in records: - ids, table, redaction = getGetByIdRequestBody(record) - validatedRecords.append((ids, table, redaction)) - async with ClientSession() as session: - for record in validatedRecords: - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - params = {"skyflow_ids": record[0], "redaction": record[2]} - task = asyncio.ensure_future( - get(url, headers, params, session, record[1])) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - -async def get(url, headers, params, session, table): - async with session.get(url + "/" + table, headers=headers, params=params, ssl=False) as response: - try: - return (await response.read(), response.status, table, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status, table) - -def createGetResponseBody(responses): - result = { - "records": [], - "errors": [] - } - partial = False - for response in responses: - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - changedRecords = [] - for record in jsonRes["records"]: - temp = record - temp["table"] = r[2] - changedRecords.append(temp) - result["records"] += changedRecords - else: - temp = {"error": {}} - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = jsonRes["error"]["message"] - if len(r) > 3 and r[3] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[3]) - result["errors"].append(temp) - partial = True - return result, partial diff --git a/skyflow/vault/_insert.py b/skyflow/vault/_insert.py deleted 
file mode 100644 index 8de342ae..00000000 --- a/skyflow/vault/_insert.py +++ /dev/null @@ -1,238 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json - -import requests -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName -from skyflow.vault._config import BYOT, InsertOptions - -interface = InterfaceName.INSERT.value - - -def getInsertRequestBody(data, options: InsertOptions): - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - upsertOptions = options.upsert - - if upsertOptions: - validateUpsertOptions(upsertOptions=upsertOptions) - - requestPayload = [] - for index, record in enumerate(records): - tableName, fields = getTableAndFields(record) - postPayload = { - "tableName": tableName, - "fields": fields, - "method": "POST", - "quorum": True, - } - validateTokensAndByotMode(record, options.byot) - if "tokens" in record: - tokens = getTokens(record) - postPayload["tokens"] = tokens - - if upsertOptions: - postPayload["upsert"] = getUpsertColumn(tableName,upsertOptions) - - if options.tokens: - postPayload['tokenization'] = True - - requestPayload.append(postPayload) - requestBody = { - "records": requestPayload, - "continueOnError": options.continueOnError, - "byot": options.byot.value - } - if options.continueOnError == None: - requestBody.pop('continueOnError') - try: - jsonBody = json.dumps(requestBody) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_JSON.value % ( - 'insert payload'), interface=interface) - - return jsonBody - - 
-def getTableAndFields(record): - try: - table = record["table"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TABLE_KEY_ERROR, interface=interface) - - if not isinstance(table, str): - tableType = str(type(table)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - tableType), interface=interface) - - try: - fields = record["fields"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.FIELDS_KEY_ERROR, interface=interface) - - if not isinstance(fields, dict): - fieldsType = str(type(fields)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % ( - fieldsType), interface=interface) - - return (table, fields) - -def validateTokensAndByotMode(record, byot:BYOT): - - if not isinstance(byot, BYOT): - byotType = str(type(byot)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_BYOT_TYPE.value % (byotType), interface=interface) - - if byot == BYOT.DISABLE: - if "tokens" in record: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.TOKENS_PASSED_FOR_BYOT_DISABLE, interface=interface) - elif "tokens" not in record: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % byot.value, interface=interface) - elif byot == BYOT.ENABLE_STRICT: - tokens = record["tokens"] - fields = record["fields"] - if len(tokens) != len(fields): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT, interface=interface) - -def getTokens(record): - tokens = record["tokens"] - if not isinstance(tokens, dict): - tokensType = str(type(tokens)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_TOKENS_TYPE.value % ( - tokensType), interface=interface) - - if len(tokens) == 0 : - raise 
SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_TOKENS_IN_INSERT, interface= interface) - - fields = record["fields"] - for tokenKey in tokens: - if tokenKey not in fields: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS, interface= interface) - return tokens - -def processResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content.decode('utf-8') - try: - response.raise_for_status() - try: - jsonContent = json.loads(content) - if 'x-request-id' in response.headers: - requestId = response.headers['x-request-id'] - jsonContent['requestId'] = requestId - return jsonContent - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response != None and response.content != None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - if 'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - raise SkyflowError(statusCode, message, interface=interface) - - -def convertResponse(request: dict, response: dict, options: InsertOptions): - responseArray = response['responses'] - requestId = response['requestId'] - records = request['records'] - - if options.continueOnError: - return buildResponseWithContinueOnError(responseArray, records, options.tokens, requestId) - - else: - return buildResponseWithoutContinueOnError(responseArray, records, options.tokens) - -def buildResponseWithContinueOnError(responseArray, records, tokens: bool, requestId): - partial = False - errors = [] - result = [] - for idx, response 
in enumerate(responseArray): - table = records[idx]['table'] - body = response['Body'] - status = response['Status'] - - if 'records' in body: - skyflow_id = body['records'][0]['skyflow_id'] - if tokens: - fieldsDict = body['records'][0]['tokens'] - fieldsDict['skyflow_id'] = skyflow_id - result.append({'table': table, 'fields': fieldsDict, 'request_index': idx}) - else: - result.append({'table': table, 'skyflow_id': skyflow_id, 'request_index': idx}) - elif 'error' in body: - partial = True - message = body['error'] - message += ' - request id: ' + requestId - error = {"code": status, "description": message, "request_index": idx} - errors.append({"error": error}) - finalResponse = {"records": result, "errors": errors} - if len(result) == 0: - partial = False - finalResponse.pop('records') - elif len(errors) == 0: - finalResponse.pop('errors') - return finalResponse, partial - -def buildResponseWithoutContinueOnError(responseArray, records, tokens: bool): - # recordsSize = len(records) - result = [] - for idx, _ in enumerate(responseArray): - table = records[idx]['table'] - skyflow_id = responseArray[idx]['records'][0]['skyflow_id'] - if tokens: - fieldsDict = responseArray[idx]['records'][0]['tokens'] - fieldsDict['skyflow_id'] = skyflow_id - result.append({'table': table, 'fields': fieldsDict, 'request_index': idx}) - else: - result.append({'table': table, 'skyflow_id': skyflow_id, 'request_index': idx}) - return {'records': result}, False - -def getUpsertColumn(tableName, upsertOptions): - uniqueColumn:str = '' - for upsertOption in upsertOptions: - if tableName == upsertOption.table: - uniqueColumn = upsertOption.column - return uniqueColumn - -def validateUpsertOptions(upsertOptions): - if not isinstance(upsertOptions,list): - upsertOptionsType = str(type(upsertOptions)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_OPTIONS_TYPE.value %( - upsertOptionsType),interface=interface) - if len(upsertOptions) == 0: - raise 
SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTIONS_LIST.value, interface=interface) - - for index, upsertOption in enumerate(upsertOptions): - if upsertOption.table == None or not isinstance(upsertOption.table,str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_TABLE_TYPE.value %( - index),interface=interface) - if upsertOption.table == '': - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_TABLE.value %( - index),interface=interface) - if upsertOption.column == None or not isinstance(upsertOption.column,str): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_UPSERT_COLUMN_TYPE.value %( - index),interface=interface) - if upsertOption.column == '': - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_COLUMN.value %( - index),interface=interface) \ No newline at end of file diff --git a/skyflow/vault/_query.py b/skyflow/vault/_query.py deleted file mode 100644 index 373264fa..00000000 --- a/skyflow/vault/_query.py +++ /dev/null @@ -1,62 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json - -import requests -from ._config import QueryOptions -from requests.models import HTTPError -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow._utils import InterfaceName - -interface = InterfaceName.QUERY.value - - -def getQueryRequestBody(data, options): - try: - query = data["query"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.QUERY_KEY_ERROR, interface=interface) - - if not isinstance(query, str): - queryType = str(type(query)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_QUERY_TYPE.value % queryType, interface=interface) - - if not query.strip(): - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT,SkyflowErrorMessages.EMPTY_QUERY.value, interface=interface) - - requestBody = {"query": query} - try: - jsonBody = json.dumps(requestBody) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_JSON.value % ( - 'query payload'), interface=interface) - - return jsonBody - -def getQueryResponse(response: requests.Response, interface=interface): - statusCode = response.status_code - content = response.content.decode('utf-8') - try: - response.raise_for_status() - try: - return json.loads(content) - except: - raise SkyflowError( - statusCode, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content, interface=interface) - except HTTPError: - message = SkyflowErrorMessages.API_ERROR.value % statusCode - if response != None and response.content != None: - try: - errorResponse = json.loads(content) - if 'error' in errorResponse and type(errorResponse['error']) == type({}) and 'message' in errorResponse['error']: - message = errorResponse['error']['message'] - except: - message = SkyflowErrorMessages.RESPONSE_NOT_JSON.value % content - raise SkyflowError(SkyflowErrorCodes.INVALID_INDEX, message, interface=interface) - error = {"error": {}} - if 
'x-request-id' in response.headers: - message += ' - request id: ' + response.headers['x-request-id'] - error['error'].update({"code": statusCode, "description": message}) - raise SkyflowError(SkyflowErrorCodes.SERVER_ERROR, SkyflowErrorMessages.SERVER_ERROR.value, error, interface=interface) diff --git a/skyflow/vault/_token.py b/skyflow/vault/_token.py deleted file mode 100644 index d80f1751..00000000 --- a/skyflow/vault/_token.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import jwt -import time -from skyflow.errors._skyflow_errors import * - - -def tokenProviderWrapper(storedToken: str, newTokenProvider, interface: str): - ''' - Check if stored token is not expired, if not return a new token - ''' - - if len(storedToken) == 0: - newToken = newTokenProvider() - verify_token_from_provider(newToken, interface) - return newToken - - try: - decoded = jwt.decode(storedToken, options={ - "verify_signature": False, "verify_aud": False}) - if time.time() < decoded['exp']: - return storedToken - else: - newToken = newTokenProvider() - verify_token_from_provider(newToken, interface) - return newToken - except Exception: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.JWT_DECODE_ERROR, interface=interface) - - -def verify_token_from_provider(token, interface): - ''' - Verify the jwt from token provider - ''' - try: - jwt.decode(token, options={ - "verify_signature": False, - "verify_aud": False - }, algorithms=['RS256']) - except Exception as e: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN, interface=interface) diff --git a/skyflow/vault/_update.py b/skyflow/vault/_update.py deleted file mode 100644 index c27a0919..00000000 --- a/skyflow/vault/_update.py +++ /dev/null @@ -1,106 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json - -import asyncio -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from ._insert import getTableAndFields -from skyflow._utils import InterfaceName, getMetrics -from aiohttp import ClientSession -from ._config import UpdateOptions - -interface = InterfaceName.UPDATE.value - -async def sendUpdateRequests(data,options: UpdateOptions,url,token): - tasks = [] - - try: - records = data["records"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.RECORDS_KEY_ERROR, interface=interface) - if not isinstance(records, list): - recordsType = str(type(records)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % ( - recordsType), interface=interface) - - validatedRecords = [] - for record in records: - tableName = validateUpdateRecord(record) - validatedRecords.append(record) - async with ClientSession() as session: - for record in validatedRecords: - recordUrl = url +'/'+ tableName +'/'+ record["id"] - reqBody = { - "record": { - "fields": record["fields"] - }, - "tokenization": options.tokens - } - reqBody = json.dumps(reqBody) - headers = { - "Authorization": "Bearer " + token, - "sky-metadata": json.dumps(getMetrics()) - } - task = asyncio.ensure_future(put(recordUrl, reqBody, headers, session)) - tasks.append(task) - await asyncio.gather(*tasks) - await session.close() - return tasks - -def validateUpdateRecord(record): - try: - id = record["id"] - except KeyError: - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.IDS_KEY_ERROR, interface=interface) - if not isinstance(id, str): - idType = str(type(id)) - raise SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.INVALID_ID_TYPE.value % (idType), interface=interface) - table, fields = getTableAndFields(record) - keysLength = len(fields.keys()) - if(keysLength < 1): - raise 
SkyflowError(SkyflowErrorCodes.INVALID_INPUT, - SkyflowErrorMessages.UPDATE_FIELD_KEY_ERROR, interface= interface) - return table - -async def put(url, data, headers, session): - async with session.put(url, data=data, headers=headers, ssl=False) as response: - try: - return (await response.read(), response.status, response.headers['x-request-id']) - except KeyError: - return (await response.read(), response.status) - - -def createUpdateResponseBody(responses): - result = { - "records": [], - "errors": [] - } - partial = False - for response in responses: - r = response.result() - status = r[1] - try: - jsonRes = json.loads(r[0].decode('utf-8')) - except: - raise SkyflowError(status, - SkyflowErrorMessages.RESPONSE_NOT_JSON.value % r[0].decode('utf-8'), interface=interface) - - if status == 200: - temp = {} - temp["id"] = jsonRes["skyflow_id"] - if "tokens" in jsonRes: - temp["fields"] = jsonRes["tokens"] - result["records"].append(temp) - else: - temp = {"error": {}} - temp["error"]["code"] = jsonRes["error"]["http_code"] - temp["error"]["description"] = jsonRes["error"]["message"] - if len(r) > 2 and r[2] != None: - temp["error"]["description"] += ' - Request ID: ' + str(r[2]) - result["errors"].append(temp) - partial = True - return result, partial diff --git a/skyflow/vault/client/__init__.py b/skyflow/vault/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/skyflow/vault/client/client.py b/skyflow/vault/client/client.py new file mode 100644 index 00000000..34a9374a --- /dev/null +++ b/skyflow/vault/client/client.py @@ -0,0 +1,102 @@ +import json +from skyflow.generated.rest import Configuration, RecordsApi, ApiClient, TokensApi, QueryApi +from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, is_expired +from skyflow.utils import get_vault_url, get_credentials, SkyflowMessages +from skyflow.utils.logger import log_info + + +class VaultClient: + def __init__(self, config): + self.__config = config 
+ self.__common_skyflow_credentials = None + self.__log_level = None + self.__client_configuration = None + self.__api_client = None + self.__logger = None + self.__is_config_updated = False + self.__bearer_token = None + + def set_common_skyflow_credentials(self, credentials): + self.__common_skyflow_credentials = credentials + + def set_logger(self, log_level, logger): + self.__log_level = log_level + self.__logger = logger + + def initialize_client_configuration(self): + credentials = get_credentials(self.__config.get("credentials"), self.__common_skyflow_credentials, logger = self.__logger) + token = self.get_bearer_token(credentials) + vault_url = get_vault_url(self.__config.get("cluster_id"), + self.__config.get("env"), + self.__config.get("vault_id"), + logger = self.__logger) + self.__client_configuration = Configuration(host=vault_url, access_token=token) + self.initialize_api_client(self.__client_configuration) + + def initialize_api_client(self, config): + self.__api_client = ApiClient(config) + + def get_records_api(self): + return RecordsApi(self.__api_client) + + def get_tokens_api(self): + return TokensApi(self.__api_client) + + def get_query_api(self): + return QueryApi(self.__api_client) + + def get_vault_id(self): + return self.__config.get("vault_id") + + def get_bearer_token(self, credentials): + if 'api_key' in credentials: + return credentials.get('api_key') + elif 'token' in credentials: + return credentials.get("token") + + options = { + "role_ids": self.__config.get("roles"), + "ctx": self.__config.get("ctx") + } + + if self.__bearer_token is None or self.__is_config_updated: + if 'path' in credentials: + path = credentials.get("path") + self.__bearer_token, _ = generate_bearer_token( + path, + options, + self.__logger + ) + else: + credentials_string = credentials.get('credentials_string') + log_info(SkyflowMessages.Info.GENERATE_BEARER_TOKEN_FROM_CREDENTIALS_STRING_TRIGGERED.value, self.__logger) + self.__bearer_token, _ = 
generate_bearer_token_from_creds( + credentials_string, + options, + self.__logger + ) + self.__is_config_updated = False + else: + log_info(SkyflowMessages.Info.REUSE_BEARER_TOKEN.value, self.__logger) + + if is_expired(self.__bearer_token): + self.__is_config_updated = True + raise SyntaxError(SkyflowMessages.Error.EXPIRED_TOKEN.value, SkyflowMessages.ErrorCodes.INVALID_INPUT.value) + + return self.__bearer_token + + def update_config(self, config): + self.__config.update(config) + self.__is_config_updated = True + + def get_config(self): + return self.__config + + def get_common_skyflow_credentials(self): + return self.__common_skyflow_credentials + + def get_log_level(self): + return self.__log_level + + def get_logger(self): + return self.__logger \ No newline at end of file diff --git a/skyflow/vault/connection/__init__.py b/skyflow/vault/connection/__init__.py new file mode 100644 index 00000000..d5a3f574 --- /dev/null +++ b/skyflow/vault/connection/__init__.py @@ -0,0 +1,2 @@ +from ._invoke_connection_request import InvokeConnectionRequest +from ._invoke_connection_response import InvokeConnectionResponse \ No newline at end of file diff --git a/skyflow/vault/connection/_invoke_connection_request.py b/skyflow/vault/connection/_invoke_connection_request.py new file mode 100644 index 00000000..9634dfb3 --- /dev/null +++ b/skyflow/vault/connection/_invoke_connection_request.py @@ -0,0 +1,12 @@ +class InvokeConnectionRequest: + def __init__(self, + method, + body = None, + path_params = None, + query_params = None, + headers = None): + self.body = body if body is not None else {} + self.method = method + self.path_params = path_params if path_params is not None else {} + self.query_params = query_params if query_params is not None else {} + self.headers = headers if headers is not None else {} \ No newline at end of file diff --git a/skyflow/vault/connection/_invoke_connection_response.py b/skyflow/vault/connection/_invoke_connection_response.py new file mode 
100644 index 00000000..661b61d3 --- /dev/null +++ b/skyflow/vault/connection/_invoke_connection_response.py @@ -0,0 +1,9 @@ +class InvokeConnectionResponse: + def __init__(self, response = None): + self.response = response + + def __repr__(self): + return f"ConnectionResponse({self.response})" + + def __str__(self): + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/controller/__init__.py b/skyflow/vault/controller/__init__.py new file mode 100644 index 00000000..14301fb3 --- /dev/null +++ b/skyflow/vault/controller/__init__.py @@ -0,0 +1,2 @@ +from ._vault import Vault +from ._connections import Connection \ No newline at end of file diff --git a/skyflow/vault/controller/_audit.py b/skyflow/vault/controller/_audit.py new file mode 100644 index 00000000..96aab2fa --- /dev/null +++ b/skyflow/vault/controller/_audit.py @@ -0,0 +1,8 @@ +class Audit: + #members + def __init__(self): + pass + + def list(self): + pass + \ No newline at end of file diff --git a/skyflow/vault/controller/_bin_look_up.py b/skyflow/vault/controller/_bin_look_up.py new file mode 100644 index 00000000..242cb640 --- /dev/null +++ b/skyflow/vault/controller/_bin_look_up.py @@ -0,0 +1,7 @@ +class BinLookUp: + #members + def __init__(self): + pass + + def get(self): + pass \ No newline at end of file diff --git a/skyflow/vault/controller/_connections.py b/skyflow/vault/controller/_connections.py new file mode 100644 index 00000000..52e2c255 --- /dev/null +++ b/skyflow/vault/controller/_connections.py @@ -0,0 +1,40 @@ +import json + +import requests +from skyflow.error import SkyflowError +from skyflow.utils import construct_invoke_connection_request, SkyflowMessages, get_metrics, \ + parse_invoke_connection_response +from skyflow.utils.logger import log_info +from skyflow.vault.connection import InvokeConnectionRequest + + +class Connection: + def __init__(self, vault_client): + self.__vault_client = vault_client + + def invoke(self, request: InvokeConnectionRequest): + 
session = requests.Session() + + config = self.__vault_client.get_config() + bearer_token = self.__vault_client.get_bearer_token(config.get("credentials")) + + connection_url = config.get("connection_url") + log_info(SkyflowMessages.Info.VALIDATING_INVOKE_CONNECTION_REQUEST.value, self.__vault_client.get_logger()) + invoke_connection_request = construct_invoke_connection_request(request, connection_url, self.__vault_client.get_logger()) + log_info(SkyflowMessages.Info.INVOKE_CONNECTION_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + + if not 'X-Skyflow-Authorization'.lower() in invoke_connection_request.headers: + invoke_connection_request.headers['x-skyflow-authorization'] = bearer_token + + invoke_connection_request.headers['sky-metadata'] = json.dumps(get_metrics()) + + log_info(SkyflowMessages.Info.INVOKE_CONNECTION_TRIGGERED, self.__vault_client.get_logger()) + + try: + response = session.send(invoke_connection_request) + session.close() + invoke_connection_response = parse_invoke_connection_response(response) + return invoke_connection_response + except Exception as e: + print(e) + raise SkyflowError(SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value, SkyflowMessages.ErrorCodes.SERVER_ERROR.value) \ No newline at end of file diff --git a/skyflow/vault/controller/_vault.py b/skyflow/vault/controller/_vault.py new file mode 100644 index 00000000..d656daec --- /dev/null +++ b/skyflow/vault/controller/_vault.py @@ -0,0 +1,260 @@ +from skyflow.generated.rest import V1FieldRecords, RecordServiceInsertRecordBody, V1DetokenizeRecordRequest, \ + V1DetokenizePayload, V1TokenizeRecordRequest, V1TokenizePayload, QueryServiceExecuteQueryBody, \ + RecordServiceBulkDeleteRecordBody, RecordServiceUpdateRecordBody, RecordServiceBatchOperationBody, V1BatchRecord, \ + BatchRecordMethod +from skyflow.generated.rest.exceptions import BadRequestException, UnauthorizedException +from skyflow.utils import SkyflowMessages, parse_insert_response, \ + handle_exception, 
parse_update_record_response, parse_delete_response, parse_detokenize_response, \ + parse_tokenize_response, parse_query_response, parse_get_response +from skyflow.utils.logger import log_info, log_error_log +from skyflow.utils.validations import validate_insert_request, validate_delete_request, validate_query_request, \ + validate_get_request, validate_update_request, validate_detokenize_request, validate_tokenize_request +from skyflow.vault.data import InsertRequest, UpdateRequest, DeleteRequest, GetRequest, QueryRequest +from skyflow.vault.tokens import DetokenizeRequest, TokenizeRequest + +class Vault: + def __init__(self, vault_client): + self.__vault_client = vault_client + + def __initialize(self): + self.__vault_client.initialize_client_configuration() + + def __build_bulk_field_records(self, values, tokens=None): + if tokens is None: + return [V1FieldRecords(fields=record) for record in values] + else: + return [V1FieldRecords(fields=record, tokens=token) for record, token in zip(values, tokens)] + + def __build_batch_field_records(self, values, tokens, table_name, return_tokens, upsert): + batch_record_list = [] + for i, value in enumerate(values): + token = tokens[i] if tokens is not None and i < len(tokens) else None + batch_record = V1BatchRecord( + fields=value, + table_name=table_name, + method=BatchRecordMethod.POST, + tokenization=return_tokens, + upsert=upsert, + tokens=token + ) + if token is not None: + batch_record.tokens = token + batch_record_list.append(batch_record) + return batch_record_list + + def __build_insert_body(self, request: InsertRequest): + if request.continue_on_error: + records_list = self.__build_batch_field_records( + request.values, + request.tokens, + request.table_name, + request.return_tokens, + request.upsert + ) + body = RecordServiceBatchOperationBody( + records=records_list, + continue_on_error=request.continue_on_error, + byot=request.token_mode.value + ) + return body + else: + records_list = 
self.__build_bulk_field_records(request.values, request.tokens) + return RecordServiceInsertRecordBody( + records=records_list, + tokenization=request.return_tokens, + upsert=request.upsert, + homogeneous=request.homogeneous, + byot=request.token_mode.value + ) + + def insert(self, request: InsertRequest): + log_info(SkyflowMessages.Info.VALIDATE_INSERT_REQUEST.value, self.__vault_client.get_logger()) + validate_insert_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.INSERT_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + records_api = self.__vault_client.get_records_api() + insert_body = self.__build_insert_body(request) + + try: + log_info(SkyflowMessages.Info.INSERT_TRIGGERED.value, self.__vault_client.get_logger()) + + if request.continue_on_error: + api_response = records_api.record_service_batch_operation(self.__vault_client.get_vault_id(), + insert_body) + + else: + api_response = records_api.record_service_insert_record(self.__vault_client.get_vault_id(), + request.table_name, insert_body) + + insert_response = parse_insert_response(api_response, request.continue_on_error) + log_info(SkyflowMessages.Info.INSERT_SUCCESS.value, self.__vault_client.get_logger()) + return insert_response + + except BadRequestException as e: + log_error_log(SkyflowMessages.ErrorLogs.INSERT_RECORDS_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def update(self, request: UpdateRequest): + log_info(SkyflowMessages.Info.VALIDATE_UPDATE_REQUEST.value, self.__vault_client.get_logger()) + validate_update_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.UPDATE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + field = {key: value for key, value in request.data.items() if key != "skyflow_id"} + record = 
V1FieldRecords(fields=field, tokens = request.tokens) + payload = RecordServiceUpdateRecordBody(record=record, tokenization=request.return_tokens, byot=request.token_mode.value) + + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.UPDATE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_update_record( + self.__vault_client.get_vault_id(), + request.table, + request.data.get("skyflow_id"), + payload + ) + log_info(SkyflowMessages.Info.UPDATE_SUCCESS.value, self.__vault_client.get_logger()) + update_response = parse_update_record_response(api_response) + return update_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.UPDATE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + handle_exception(e, self.__vault_client.get_logger()) + + def delete(self, request: DeleteRequest): + log_info(SkyflowMessages.Info.VALIDATING_DELETE_REQUEST.value, self.__vault_client.get_logger()) + validate_delete_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DELETE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.DELETE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_bulk_delete_record( + self.__vault_client.get_vault_id(), + request.table, + payload + ) + log_info(SkyflowMessages.Info.DELETE_SUCCESS.value, self.__vault_client.get_logger()) + delete_response = parse_delete_response(api_response) + return delete_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, 
self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.DELETE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def get(self, request: GetRequest): + log_info(SkyflowMessages.Info.VALIDATE_GET_REQUEST.value, self.__vault_client.get_logger()) + validate_get_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.GET_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + records_api = self.__vault_client.get_records_api() + try: + log_info(SkyflowMessages.Info.GET_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = records_api.record_service_bulk_get_record( + self.__vault_client.get_vault_id(), + object_name=request.table, + skyflow_ids=request.ids, + redaction = request.redaction_type.value if request.redaction_type is not None else None, + tokenization=request.return_tokens, + fields=request.fields, + offset=request.offset, + limit=request.limit, + download_url=request.download_url, + column_name=request.column_name, + column_values=request.column_values, + ) + log_info(SkyflowMessages.Info.GET_SUCCESS.value, self.__vault_client.get_logger()) + get_response = parse_get_response(api_response) + return get_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.GET_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def query(self, request: QueryRequest): + log_info(SkyflowMessages.Info.VALIDATING_QUERY_REQUEST.value, self.__vault_client.get_logger()) + validate_query_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.QUERY_REQUEST_RESOLVED.value, 
self.__vault_client.get_logger()) + self.__initialize() + payload = QueryServiceExecuteQueryBody(query=request.query) + query_api = self.__vault_client.get_query_api() + try: + log_info(SkyflowMessages.Info.QUERY_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = query_api.query_service_execute_query( + self.__vault_client.get_vault_id(), + payload + ) + log_info(SkyflowMessages.Info.QUERY_SUCCESS.value, self.__vault_client.get_logger()) + query_response = parse_query_response(api_response) + return query_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.QUERY_REQUEST_REJECTED.value, self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def detokenize(self, request: DetokenizeRequest): + log_info(SkyflowMessages.Info.VALIDATE_DETOKENIZE_REQUEST.value, self.__vault_client.get_logger()) + validate_detokenize_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.DETOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + tokens_list = [ + V1DetokenizeRecordRequest(token=token, redaction=request.redaction_type.value) + for token in request.tokens + ] + payload = V1DetokenizePayload(detokenization_parameters=tokens_list, continue_on_error=request.continue_on_error) + tokens_api = self.__vault_client.get_tokens_api() + try: + log_info(SkyflowMessages.Info.DETOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = tokens_api.record_service_detokenize( + self.__vault_client.get_vault_id(), + detokenize_payload=payload + ) + log_info(SkyflowMessages.Info.DETOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) + detokenize_response = parse_detokenize_response(api_response) + return detokenize_response + except 
Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.DETOKENIZE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + + def tokenize(self, request: TokenizeRequest): + log_info(SkyflowMessages.Info.VALIDATING_TOKENIZE_REQUEST.value, self.__vault_client.get_logger()) + validate_tokenize_request(self.__vault_client.get_logger(), request) + log_info(SkyflowMessages.Info.TOKENIZE_REQUEST_RESOLVED.value, self.__vault_client.get_logger()) + self.__initialize() + + records_list = [ + V1TokenizeRecordRequest(value=item["value"], column_group=item["column_group"]) + for item in request.values + ] + payload = V1TokenizePayload(tokenization_parameters=records_list) + tokens_api = self.__vault_client.get_tokens_api() + try: + log_info(SkyflowMessages.Info.TOKENIZE_TRIGGERED.value, self.__vault_client.get_logger()) + api_response = tokens_api.record_service_tokenize( + self.__vault_client.get_vault_id(), + tokenize_payload=payload + ) + tokenize_response = parse_tokenize_response(api_response) + log_info(SkyflowMessages.Info.TOKENIZE_SUCCESS.value, self.__vault_client.get_logger()) + return tokenize_response + except Exception as e: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, logger = self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) + except UnauthorizedException as e: + log_error_log(SkyflowMessages.ErrorLogs.TOKENIZE_REQUEST_REJECTED.value, + logger=self.__vault_client.get_logger()) + handle_exception(e, self.__vault_client.get_logger()) diff --git a/skyflow/vault/data/__init__.py b/skyflow/vault/data/__init__.py new file mode 100644 index 00000000..b43b23cf --- /dev/null +++ b/skyflow/vault/data/__init__.py @@ -0,0 +1,11 
@@ +from ._insert_response import InsertResponse +from ._insert_request import InsertRequest +from ._get_request import GetRequest +from ._get_response import GetResponse +from ._delete_request import DeleteRequest +from ._delete_response import DeleteResponse +from ._update_request import UpdateRequest +from ._update_response import UpdateResponse +from ._upload_file_request import UploadFileRequest +from ._query_request import QueryRequest +from ._query_response import QueryResponse \ No newline at end of file diff --git a/skyflow/vault/data/_delete_request.py b/skyflow/vault/data/_delete_request.py new file mode 100644 index 00000000..9f1f2eaa --- /dev/null +++ b/skyflow/vault/data/_delete_request.py @@ -0,0 +1,4 @@ +class DeleteRequest: + def __init__(self, table, ids): + self.table = table + self.ids = ids diff --git a/skyflow/vault/data/_delete_response.py b/skyflow/vault/data/_delete_response.py new file mode 100644 index 00000000..0147c777 --- /dev/null +++ b/skyflow/vault/data/_delete_response.py @@ -0,0 +1,11 @@ +class DeleteResponse: + def __init__(self, deleted_ids = None, errors = None): + self.deleted_ids = deleted_ids + self.errors = errors + + def __repr__(self): + return f"DeleteResponse(deleted_ids={self.deleted_ids}, errors={self.errors})" + + def __str__(self): + return self.__repr__() + diff --git a/skyflow/vault/data/_get_request.py b/skyflow/vault/data/_get_request.py new file mode 100644 index 00000000..81cb21ad --- /dev/null +++ b/skyflow/vault/data/_get_request.py @@ -0,0 +1,22 @@ +class GetRequest: + def __init__(self, + table, + ids = None, + redaction_type = None, + return_tokens = False, + fields = None, + offset = None, + limit = None, + download_url = None, + column_name = None, + column_values = None): + self.table = table + self.ids = ids + self.redaction_type = redaction_type + self.return_tokens = return_tokens + self.fields = fields + self.offset = offset + self.limit = limit + self.download_url = download_url + self.column_name 
= column_name + self.column_values = column_values \ No newline at end of file diff --git a/skyflow/vault/data/_get_response.py b/skyflow/vault/data/_get_response.py new file mode 100644 index 00000000..cf1b0805 --- /dev/null +++ b/skyflow/vault/data/_get_response.py @@ -0,0 +1,10 @@ +class GetResponse: + def __init__(self, data=None, errors = None): + self.data = data if data else [] + self.errors = errors + + def __repr__(self): + return f"GetResponse(data={self.data}, errors={self.errors})" + + def __str__(self): + return self.__repr__() \ No newline at end of file diff --git a/skyflow/vault/data/_insert_request.py b/skyflow/vault/data/_insert_request.py new file mode 100644 index 00000000..742c5120 --- /dev/null +++ b/skyflow/vault/data/_insert_request.py @@ -0,0 +1,21 @@ +from skyflow.utils.enums import TokenMode + +class InsertRequest: + def __init__(self, + table_name, + values, + tokens = None, + upsert = None, + homogeneous = False, + token_mode = TokenMode.DISABLE, + return_tokens = True, + continue_on_error = False): + self.table_name = table_name + self.values = values + self.tokens = tokens + self.upsert = upsert + self.homogeneous = homogeneous + self.token_mode = token_mode + self.return_tokens = return_tokens + self.continue_on_error = continue_on_error + diff --git a/skyflow/vault/data/_insert_response.py b/skyflow/vault/data/_insert_response.py new file mode 100644 index 00000000..6407426d --- /dev/null +++ b/skyflow/vault/data/_insert_response.py @@ -0,0 +1,12 @@ +class InsertResponse: + def __init__(self, inserted_fields = None, errors=None): + if errors is None: + errors = list() + self.inserted_fields = inserted_fields + self.errors = errors + + def __repr__(self): + return f"InsertResponse(inserted_fields={self.inserted_fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/data/_query_request.py b/skyflow/vault/data/_query_request.py new file mode 100644 index 00000000..e17cb8d0 --- 
/dev/null +++ b/skyflow/vault/data/_query_request.py @@ -0,0 +1,3 @@ +class QueryRequest: + def __init__(self, query): + self.query = query diff --git a/skyflow/vault/data/_query_response.py b/skyflow/vault/data/_query_response.py new file mode 100644 index 00000000..e2034758 --- /dev/null +++ b/skyflow/vault/data/_query_response.py @@ -0,0 +1,10 @@ +class QueryResponse: + def __init__(self): + self.fields = [] + self.errors = [] + + def __repr__(self): + return f"QueryResponse(fields={self.fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/data/_update_request.py b/skyflow/vault/data/_update_request.py new file mode 100644 index 00000000..5b73c3ae --- /dev/null +++ b/skyflow/vault/data/_update_request.py @@ -0,0 +1,9 @@ +from skyflow.utils.enums import TokenMode + +class UpdateRequest: + def __init__(self, table, data, tokens = None, return_tokens = False, token_mode = TokenMode.DISABLE): + self.table = table + self.data = data + self.tokens = tokens + self.return_tokens = return_tokens + self.token_mode = token_mode diff --git a/skyflow/vault/data/_update_response.py b/skyflow/vault/data/_update_response.py new file mode 100644 index 00000000..dbbb9cc7 --- /dev/null +++ b/skyflow/vault/data/_update_response.py @@ -0,0 +1,10 @@ +class UpdateResponse: + def __init__(self, updated_field = None, errors=None): + self.updated_field = updated_field + self.errors = errors if errors is not None else [] + + def __repr__(self): + return f"UpdateResponse(updated_field={self.updated_field}, errors={self.errors})" + + def __str__(self): + return self.__repr__() diff --git a/skyflow/vault/data/_upload_file_request.py b/skyflow/vault/data/_upload_file_request.py new file mode 100644 index 00000000..b0da1e03 --- /dev/null +++ b/skyflow/vault/data/_upload_file_request.py @@ -0,0 +1,3 @@ +class UploadFileRequest: + def __init__(self): + pass diff --git a/skyflow/vault/tokens/__init__.py b/skyflow/vault/tokens/__init__.py new 
file mode 100644 index 00000000..ddfe8ad7 --- /dev/null +++ b/skyflow/vault/tokens/__init__.py @@ -0,0 +1,4 @@ +from ._detokenize_request import DetokenizeRequest +from ._detokenize_response import DetokenizeResponse +from ._tokenize_request import TokenizeRequest +from ._tokenize_response import TokenizeResponse \ No newline at end of file diff --git a/skyflow/vault/tokens/_detokenize_request.py b/skyflow/vault/tokens/_detokenize_request.py new file mode 100644 index 00000000..5e3bc041 --- /dev/null +++ b/skyflow/vault/tokens/_detokenize_request.py @@ -0,0 +1,7 @@ +from skyflow.utils.enums.redaction_type import RedactionType + +class DetokenizeRequest: + def __init__(self, tokens, redaction_type = RedactionType.PLAIN_TEXT, continue_on_error = False): + self.tokens = tokens + self.redaction_type = redaction_type + self.continue_on_error = continue_on_error \ No newline at end of file diff --git a/skyflow/vault/tokens/_detokenize_response.py b/skyflow/vault/tokens/_detokenize_response.py new file mode 100644 index 00000000..f42e3917 --- /dev/null +++ b/skyflow/vault/tokens/_detokenize_response.py @@ -0,0 +1,12 @@ +class DetokenizeResponse: + def __init__(self, detokenized_fields = None, errors = None): + self.detokenized_fields = detokenized_fields + self.errors = errors + + def __repr__(self): + return f"DetokenizeResponse(detokenized_fields={self.detokenized_fields}, errors={self.errors})" + + def __str__(self): + return self.__repr__() + + diff --git a/skyflow/vault/tokens/_tokenize_request.py b/skyflow/vault/tokens/_tokenize_request.py new file mode 100644 index 00000000..a1e7c2bc --- /dev/null +++ b/skyflow/vault/tokens/_tokenize_request.py @@ -0,0 +1,3 @@ +class TokenizeRequest: + def __init__(self, values): + self.values = values diff --git a/skyflow/vault/tokens/_tokenize_response.py b/skyflow/vault/tokens/_tokenize_response.py new file mode 100644 index 00000000..264b3987 --- /dev/null +++ b/skyflow/vault/tokens/_tokenize_response.py @@ -0,0 +1,11 @@ +class 
TokenizeResponse: + def __init__(self, tokenized_fields = None): + self.tokenized_fields = tokenized_fields + + + def __repr__(self): + return f"TokenizeResponse(tokenized_fields={self.tokenized_fields})" + + def __str__(self): + return self.__repr__() + diff --git a/skyflow/version.py b/skyflow/version.py deleted file mode 100644 index 4d6b1a07..00000000 --- a/skyflow/version.py +++ /dev/null @@ -1 +0,0 @@ -SDK_VERSION = '1.15.1' \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py index d803f19f..e69de29b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' diff --git a/tests/client/__init__.py b/tests/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/client/test_skyflow.py b/tests/client/test_skyflow.py new file mode 100644 index 00000000..a203b192 --- /dev/null +++ b/tests/client/test_skyflow.py @@ -0,0 +1,325 @@ +import unittest +from unittest.mock import patch + +from skyflow import LogLevel, Env +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow import Skyflow + +VALID_VAULT_CONFIG = { + "vault_id": "VAULT_ID", + "cluster_id": "CLUSTER_ID", + "env": Env.DEV, + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +INVALID_VAULT_CONFIG = { + "cluster_id": "CLUSTER_ID", # Missing vault_id + "env": Env.DEV, + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +VALID_CONNECTION_CONFIG = { + "connection_id": "CONNECTION_ID", + "connection_url": "https://CONNECTION_URL", + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +INVALID_CONNECTION_CONFIG = { + "connection_url": "https://CONNECTION_URL", + # Missing connection_id + "credentials": {"path": "/path/to/valid_credentials.json"} +} + +VALID_CREDENTIALS = { + "path": "/path/to/valid_credentials.json" +} + +class TestSkyflow(unittest.TestCase): + + def setUp(self): + self.builder = Skyflow.builder() + + def 
test_add_vault_config_success(self): + builder = self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.assertIn(VALID_VAULT_CONFIG, self.builder._Builder__vault_list) + self.assertEqual(builder, self.builder) + + def test_add_already_exists_vault_config(self): + builder = self.builder.add_vault_config(VALID_VAULT_CONFIG) + with self.assertRaises(SkyflowError) as context: + builder.add_vault_config(VALID_VAULT_CONFIG) + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_ALREADY_EXISTS.value.format(VALID_VAULT_CONFIG.get("vault_id"))) + + + def test_add_vault_config_invalid(self): + with self.assertRaises(SkyflowError) as context: + self.builder.add_vault_config(INVALID_VAULT_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_VAULT_ID.value) + + def test_remove_vault_config_valid(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + result = self.builder.remove_vault_config(VALID_VAULT_CONFIG['vault_id']) + + self.assertNotIn(VALID_VAULT_CONFIG['vault_id'], self.builder._Builder__vault_configs) + + @patch('skyflow.client.skyflow.log_error') + def test_remove_vault_config_invalid(self, mock_log_error): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + self.builder.remove_vault_config("invalid_id") + mock_log_error.assert_called_once() + + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_update_vault_config_valid(self, mock_validate): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + updated_config = VALID_VAULT_CONFIG.copy() + updated_config["cluster_id"] = "test.cluster" + self.builder.update_vault_config(updated_config) + mock_validate.assert_called_once() + + def test_get_vault(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + + config = self.builder.get_vault_config(VALID_VAULT_CONFIG["vault_id"]) + + self.assertEqual(self.builder._Builder__vault_list[0], 
VALID_VAULT_CONFIG) + + def test_get_vault_with_vault_id_none(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + vault = self.builder.get_vault_config(None) + config = vault.get("vault_client").get_config() + self.assertEqual(self.builder._Builder__vault_list[0], config) + + def test_get_vault_with_empty_vault_list_when_vault_id_is_none_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config(None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_VAULT_CONFIGS.value) + + def test_get_vault_with_invalid_vault_id_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config('invalid_id') + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format('invalid_id')) + + def test_get_vault_with_invalid_vault_id_and_non_empty_list_raises_error(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_vault_config('invalid_vault_id') + + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format("invalid_vault_id")) + + + @patch('skyflow.client.skyflow.validate_vault_config') + def test_build_calls_validate_vault_config(self, mock_validate_vault_config): + self.builder.add_vault_config(VALID_VAULT_CONFIG) + self.builder.build() + mock_validate_vault_config.assert_called_once_with(self.builder._Builder__logger, VALID_VAULT_CONFIG) + + def test_add_connection_config_valid(self): + result = self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + + self.assertIn(VALID_CONNECTION_CONFIG, self.builder._Builder__connection_list) + self.assertEqual(result, self.builder) + + def test_add_already_exists_connection_config(self): + connection_id = VALID_CONNECTION_CONFIG.get("connection_id") + builder = 
self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + + with self.assertRaises(SkyflowError) as context: + builder.add_connection_config(VALID_CONNECTION_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_ALREADY_EXISTS.value.format(connection_id)) + + def test_add_connection_config_invalid(self): + with self.assertRaises(SkyflowError) as context: + self.builder.add_connection_config(INVALID_CONNECTION_CONFIG) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CONNECTION_ID.value) + + def test_remove_connection_config_valid(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + result = self.builder.remove_connection_config(VALID_CONNECTION_CONFIG.get("connection_id")) + + self.assertNotIn(VALID_CONNECTION_CONFIG.get("connection_id"), self.builder._Builder__connection_configs) + + + @patch('skyflow.client.skyflow.log_error') + def test_remove_connection_config_invalid(self, mock_log_error): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + self.builder.remove_connection_config("invalid_id") + mock_log_error.assert_called_once() + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_update_connection_config_valid(self, mock_validate): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + updated_config = VALID_CONNECTION_CONFIG.copy() + updated_config["connection_url"] = "test_url" + self.builder.update_connection_config(updated_config) + mock_validate.assert_called_once() + + def test_get_connection_config(self): + connection_id = VALID_CONNECTION_CONFIG.get("connection_id") + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + + connection = self.builder.get_connection_config(connection_id) + config = connection.get("vault_client").get_config() + self.assertEqual(self.builder._Builder__connection_list[0], config) + + def 
test_get_connection_config_with_connection_id_none(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + self.builder.get_connection_config(None) + self.assertEqual(self.builder._Builder__connection_list[0], VALID_CONNECTION_CONFIG) + + def test_get_connection_with_empty_connection_list_raises_error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config('invalid_id') + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format('invalid_id')) + + def test_get_connection_with_invalid_connection_id_raises_error(self): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config('invalid_connection_id') + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format('invalid_connection_id')) + + def test_get_connection_with_invalid_connection_id_and_empty_list_raises_Error(self): + self.builder.build() + with self.assertRaises(SkyflowError) as context: + self.builder.get_connection_config(None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.EMPTY_CONNECTION_CONFIGS.value) + + @patch('skyflow.client.skyflow.validate_connection_config') + def test_build_calls_validate_connection_config(self, mock_validate): + self.builder.add_connection_config(VALID_CONNECTION_CONFIG) + self.builder.build() + mock_validate.assert_called_once_with(self.builder._Builder__logger, VALID_CONNECTION_CONFIG) + + + def test_build_valid(self): + self.builder.add_vault_config(VALID_VAULT_CONFIG).add_connection_config(VALID_CONNECTION_CONFIG) + client = self.builder.build() + self.assertIsInstance(client, Skyflow) + + def test_set_log_level(self): + self.builder.set_log_level(LogLevel.INFO) + self.assertEqual(self.builder._Builder__log_level, LogLevel.INFO) + + 
def test_invalid_credentials(self): + builder = self.builder.add_skyflow_credentials(VALID_CREDENTIALS) + builder.add_connection_config(VALID_CONNECTION_CONFIG) + builder.add_vault_config(VALID_VAULT_CONFIG) + builder.build() + self.assertEqual(VALID_CREDENTIALS, self.builder._Builder__skyflow_credentials) + self.assertEqual(builder, self.builder) + + @patch('skyflow.client.skyflow.validate_vault_config') + def test_skyflow_client_add_remove_vault_config(self, mock_validate_vault_config): + skyflow_client = self.builder.add_vault_config(VALID_VAULT_CONFIG).build() + new_config = VALID_VAULT_CONFIG.copy() + new_config['vault_id'] = "VAULT_ID" + skyflow_client.add_vault_config(new_config) + + assert mock_validate_vault_config.call_count == 2 + + self.assertEqual("VAULT_ID", + skyflow_client.get_vault_config(new_config['vault_id']).get("vault_id")) + + skyflow_client.remove_vault_config(new_config['vault_id']) + with self.assertRaises(SkyflowError) as context: + skyflow_client.get_vault_config(new_config['vault_id']).get("vault_id") + + self.assertEqual(context.exception.message, SkyflowMessages.Error.VAULT_ID_NOT_IN_CONFIG_LIST.value.format( + new_config['vault_id'])) + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_skyflow_client_update_and_get_vault_config(self, mock_update_config): + skyflow_client = self.builder.add_vault_config(VALID_VAULT_CONFIG).build() + new_config = VALID_VAULT_CONFIG.copy() + new_config['env'] = Env.SANDBOX + skyflow_client.update_vault_config(new_config) + mock_update_config.assert_called_once() + + vault = skyflow_client.get_vault_config(VALID_VAULT_CONFIG.get("vault_id")) + + self.assertEqual(VALID_VAULT_CONFIG.get("vault_id"), vault.get("vault_id")) + + @patch('skyflow.client.skyflow.validate_connection_config') + def test_skyflow_client_add_remove_connection_config(self, mock_validate_connection_config): + skyflow_client = self.builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + new_config = 
VALID_CONNECTION_CONFIG.copy() + new_config['connection_id'] = "CONNECTION_ID" + skyflow_client.add_connection_config(new_config) + + assert mock_validate_connection_config.call_count == 2 + self.assertEqual("CONNECTION_ID", skyflow_client.get_connection_config(new_config['connection_id']).get("connection_id")) + + skyflow_client.remove_connection_config("CONNECTION_ID") + with self.assertRaises(SkyflowError) as context: + skyflow_client.get_connection_config(new_config['connection_id']).get("connection_id") + + self.assertEqual(context.exception.message, SkyflowMessages.Error.CONNECTION_ID_NOT_IN_CONFIG_LIST.value.format(new_config['connection_id'])) + + + @patch('skyflow.vault.client.client.VaultClient.update_config') + def test_skyflow_client_update_and_get_connection_config(self, mock_update_config): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + new_config = VALID_CONNECTION_CONFIG.copy() + new_config['connection_url'] = 'updated_url' + skyflow_client.update_connection_config(new_config) + mock_update_config.assert_called_once() + + connection = skyflow_client.get_connection_config(VALID_CONNECTION_CONFIG.get("connection_id")) + + self.assertEqual(VALID_CONNECTION_CONFIG.get("connection_id"), connection.get("connection_id")) + + def test_skyflow_add_and_update_skyflow_credentials(self): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + skyflow_client.add_skyflow_credentials(VALID_CREDENTIALS) + + self.assertEqual(VALID_CREDENTIALS, builder._Builder__skyflow_credentials) + + new_credentials = VALID_CREDENTIALS.copy() + new_credentials['path'] = 'path/to/new_credentials' + + skyflow_client.update_skyflow_credentials(new_credentials) + + self.assertEqual(new_credentials, builder._Builder__skyflow_credentials) + + + def test_skyflow_add_and_update_log_level(self): + builder = self.builder + skyflow_client = 
builder.add_connection_config(VALID_CONNECTION_CONFIG).build() + skyflow_client.set_log_level(LogLevel.INFO) + + self.assertEqual(LogLevel.INFO, builder._Builder__log_level) + + skyflow_client.update_log_level(LogLevel.ERROR) + self.assertEqual(LogLevel.ERROR, builder._Builder__log_level) + + + @patch('skyflow.client.Skyflow.Builder.get_vault_config') + def test_skyflow_vault_and_connection_method(self, mock_get_vault_config): + builder = self.builder + skyflow_client = builder.add_connection_config(VALID_CONNECTION_CONFIG).add_vault_config(VALID_VAULT_CONFIG).build() + skyflow_client.vault() + skyflow_client.connection() + mock_get_vault_config.assert_called_once() \ No newline at end of file diff --git a/tests/service_account/__init__.py b/tests/service_account/__init__.py index d803f19f..e69de29b 100644 --- a/tests/service_account/__init__.py +++ b/tests/service_account/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' diff --git a/tests/service_account/data/invalidJson.json b/tests/service_account/data/invalidJson.json deleted file mode 100644 index d857aa37..00000000 --- a/tests/service_account/data/invalidJson.json +++ /dev/null @@ -1 +0,0 @@ -{"a"} \ No newline at end of file diff --git a/tests/service_account/data/invalidPrivateKey.json b/tests/service_account/data/invalidPrivateKey.json deleted file mode 100644 index 9c2ff417..00000000 --- a/tests/service_account/data/invalidPrivateKey.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "privateKey": "invalid key", - "clientID": "Some client ID", - "keyID": "Some key ID", - "tokenURI": "https://unknown.org" -} \ No newline at end of file diff --git a/tests/service_account/data/noClientID.json b/tests/service_account/data/noClientID.json deleted file mode 100644 index 10699c21..00000000 --- a/tests/service_account/data/noClientID.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "privateKey": "Some private key" -} \ No newline at end of file diff --git a/tests/service_account/data/noKeyID.json 
b/tests/service_account/data/noKeyID.json deleted file mode 100644 index 9b93da90..00000000 --- a/tests/service_account/data/noKeyID.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "privateKey": "Some private key", - "clientID": "Some client ID" -} \ No newline at end of file diff --git a/tests/service_account/data/noPrivateKey.json b/tests/service_account/data/noPrivateKey.json deleted file mode 100644 index 9b2280ee..00000000 --- a/tests/service_account/data/noPrivateKey.json +++ /dev/null @@ -1 +0,0 @@ -{"data": "this has no data"} \ No newline at end of file diff --git a/tests/service_account/data/noTokenURI.json b/tests/service_account/data/noTokenURI.json deleted file mode 100644 index f7d86552..00000000 --- a/tests/service_account/data/noTokenURI.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "privateKey": "Some private key", - "clientID": "Some client ID", - "keyID": "Some key ID" -} \ No newline at end of file diff --git a/tests/service_account/invalid_creds.json b/tests/service_account/invalid_creds.json new file mode 100644 index 00000000..ab0c0141 --- /dev/null +++ b/tests/service_account/invalid_creds.json @@ -0,0 +1 @@ +// \ No newline at end of file diff --git a/tests/service_account/test__utils.py b/tests/service_account/test__utils.py new file mode 100644 index 00000000..a426fddd --- /dev/null +++ b/tests/service_account/test__utils.py @@ -0,0 +1,149 @@ +import unittest +import time +import jwt +import json +from unittest.mock import patch +import os +from skyflow.error import SkyflowError +from skyflow.service_account import is_expired, generate_bearer_token, \ + generate_bearer_token_from_creds +from skyflow.utils import SkyflowMessages +from skyflow.service_account._utils import get_service_account_token, get_signed_jwt, generate_signed_data_tokens, get_signed_data_token_response_object, generate_signed_data_tokens_from_creds + +creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") +with open(creds_path, 
'r') as file: + credentials = json.load(file) + +VALID_CREDENTIALS_STRING = json.dumps(credentials) + +CREDENTIALS_WITHOUT_CLIENT_ID = { + 'privateKey': 'private_key' +} + +CREDENTIALS_WITHOUT_KEY_ID = { + 'privateKey': 'private_key', + 'clientID': 'client_id' +} + +CREDENTIALS_WITHOUT_TOKEN_URI = { + 'privateKey': 'private_key', + 'clientID': 'client_id', + 'keyID': 'key_id' +} + +VALID_SERVICE_ACCOUNT_CREDS = credentials + +class TestServiceAccountUtils(unittest.TestCase): + def test_is_expired_empty_token(self): + self.assertTrue(is_expired("")) + + def test_is_expired_non_expired_token(self): + future_time = time.time() + 1000 + token = jwt.encode({"exp": future_time}, key="test", algorithm="HS256") + self.assertFalse(is_expired(token)) + + def test_is_expired_expired_token(self): + past_time = time.time() - 1000 + token = jwt.encode({"exp": past_time}, key="test", algorithm="HS256") + self.assertTrue(is_expired(token)) + + @patch("skyflow.service_account._utils.log_error") + @patch("jwt.decode", side_effect=Exception("Some error")) + def test_is_expired_general_exception(self, mock_jwt_decode, mock_log_error): + token = jwt.encode({"exp": time.time() + 1000}, key="test", algorithm="HS256") + self.assertTrue(is_expired(token)) + mock_log_error.assert_called_once_with( + SkyflowMessages.Error.JWT_DECODE_ERROR.value, 400, logger=None + ) + + @patch("builtins.open", side_effect=FileNotFoundError) + def test_generate_bearer_token_invalid_file_path(self, mock_open): + with self.assertRaises(SkyflowError) as context: + generate_bearer_token("invalid_path") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value) + + @patch("json.load", side_effect=json.JSONDecodeError("Expecting value", "", 0)) + def test_generate_bearer_token_invalid_json(self, mock_json_load): + creds_path = os.path.join(os.path.dirname(__file__), "invalid_creds.json") + with self.assertRaises(SkyflowError) as context: + 
generate_bearer_token(creds_path) + self.assertEqual(context.exception.message, SkyflowMessages.Error.FILE_INVALID_JSON.value.format(creds_path)) + + @patch("skyflow.service_account._utils.get_service_account_token") + def test_generate_bearer_token_valid_file_path(self, mock_generate_bearer_token): + creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") + generate_bearer_token(creds_path) + mock_generate_bearer_token.assert_called_once() + + @patch("skyflow.service_account._utils.get_service_account_token") + def test_generate_bearer_token_from_creds_with_valid_json_string(self, mock_generate_bearer_token): + generate_bearer_token_from_creds(VALID_CREDENTIALS_STRING) + mock_generate_bearer_token.assert_called_once() + + def test_generate_bearer_token_from_creds_invalid_json(self): + with self.assertRaises(SkyflowError) as context: + generate_bearer_token_from_creds("invalid_json") + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value) + + def test_get_service_account_token_missing_private_key(self): + incomplete_credentials = {} + with self.assertRaises(SkyflowError) as context: + get_service_account_token(incomplete_credentials, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_PRIVATE_KEY.value) + + def test_get_service_account_token_missing_client_id_key(self): + with self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_CLIENT_ID, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_CLIENT_ID.value) + + def test_get_service_account_token_missing_key_id_key(self): + with self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_KEY_ID, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_KEY_ID.value) + + def test_get_service_account_token_missing_token_uri_key(self): + with 
self.assertRaises(SkyflowError) as context: + get_service_account_token(CREDENTIALS_WITHOUT_TOKEN_URI, {}, None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.MISSING_TOKEN_URI.value) + + def test_get_service_account_token_with_valid_credentials(self): + access_token, _ = get_service_account_token(VALID_SERVICE_ACCOUNT_CREDS, {}, None) + self.assertTrue(access_token) + + + @patch("jwt.encode", side_effect=Exception) + def test_get_signed_jwt_invalid_format(self, mock_jwt_encode): + with self.assertRaises(SkyflowError) as context: + get_signed_jwt({}, "client_id", "key_id", "token_uri", "private_key", None) + self.assertEqual(context.exception.message, SkyflowMessages.Error.JWT_INVALID_FORMAT.value) + + def test_get_signed_data_token_response_object(self): + token = "sample_token" + signed_token = "signed_sample_token" + response = get_signed_data_token_response_object(signed_token, token) + self.assertEqual(response[0], token) + self.assertEqual(response[1], signed_token) + + def test_generate_signed_data_tokens_from_file_path(self): + creds_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") + options = {"data_tokens": ["token1", "token2"], "ctx": 'ctx'} + result = generate_signed_data_tokens(creds_path, options) + self.assertEqual(len(result), 2) + + def test_generate_signed_data_tokens_from_invalid_file_path(self): + options = {"data_tokens": ["token1", "token2"]} + with self.assertRaises(SkyflowError) as context: + result = generate_signed_data_tokens('credentials1.json', options) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIAL_FILE_PATH.value) + + def test_generate_signed_data_tokens_from_creds(self): + options = {"data_tokens": ["token1", "token2"]} + result = generate_signed_data_tokens_from_creds(VALID_CREDENTIALS_STRING, options) + self.assertEqual(len(result), 2) + + def test_generate_signed_data_tokens_from_creds_with_invalid_string(self): + 
options = {"data_tokens": ["token1", "token2"]} + credentials_string = '{' + with self.assertRaises(SkyflowError) as context: + result = generate_signed_data_tokens_from_creds(credentials_string, options) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CREDENTIALS_STRING.value) \ No newline at end of file diff --git a/tests/service_account/test_generate_bearer_token.py b/tests/service_account/test_generate_bearer_token.py deleted file mode 100644 index 586db1ca..00000000 --- a/tests/service_account/test_generate_bearer_token.py +++ /dev/null @@ -1,171 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import unittest -import os -from dotenv import dotenv_values -from skyflow.service_account import generate_bearer_token, generate_bearer_token_from_creds, generate_bearer_token -from skyflow.errors._skyflow_errors import * -import json -from skyflow.service_account._token import getSignedJWT, getResponseToken, sendRequestWithToken - - -class TestGenerateBearerToken(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join( - os.getcwd(), 'tests/service_account/data/') - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testWithInvalidFilePath(self): - try: - generate_bearer_token('unknownfilepath') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.FILE_NOT_FOUND.value % ('unknownfilepath')) - - def testInvalidJSON(self): - path = self.getDataPath('empty') - try: - generate_bearer_token(path) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.FILE_INVALID_JSON.value % (path)) - - def testWithNoPrivateKey(self): - try: - generate_bearer_token(self.getDataPath('noPrivateKey')) - except SkyflowError as se: - self.assertEqual(se.code, 
SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testWithNoClientID(self): - try: - generate_bearer_token(self.getDataPath('noClientID')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_CLIENT_ID.value) - - def testWithNoKeyID(self): - try: - generate_bearer_token(self.getDataPath('noKeyID')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_KEY_ID.value) - - def testWithNoTokenURI(self): - try: - generate_bearer_token(self.getDataPath('noTokenURI')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_TOKEN_URI.value) - - def testInvalidCreds(self): - try: - generate_bearer_token(self.getDataPath('invalidPrivateKey')) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.JWT_INVALID_FORMAT.value) - - def testGenerateBearerTokenFromCredsInvalid(self): - creds_file = open(self.getDataPath('invalidPrivateKey'), 'r') - credentialsString = json.dumps(creds_file.read()) - creds_file.close() - try: - generate_bearer_token_from_creds(credentialsString) - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testGenerateBearerTokenFromCredsFail(self): - env_values = dotenv_values('.env') - credentials_path = env_values['CREDENTIALS_FILE_PATH'] - creds_file = open(credentials_path, 'r') - credentialsString = json.dumps(creds_file.read()) - try: - generate_bearer_token_from_creds(credentialsString) - except SkyflowError as se: - self.assertEqual(se.code, 
SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_PRIVATE_KEY.value) - - def testNonExistentFileArg(self): - try: - generate_bearer_token('non-existent-file.json') - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FILE_NOT_FOUND.value % 'non-existent-file.json') - - def testInvalidJSONInCreds(self): - filepath = self.getDataPath('invalidJson') - try: - generate_bearer_token(filepath) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FILE_INVALID_JSON.value % filepath) - try: - generate_bearer_token_from_creds(self.getDataPath('invalid-json')) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_CREDENTIALS.value) - - def testGenerateToken(self): - try: - generate_bearer_token(self.getDataPath('invalid-json')) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - - def testGetSignedJWTInvalidValue(self): - try: - getSignedJWT('{}clientID', 'keyId', - 'privateKey', 'ww.tokenURI.com') - self.fail('invalid jwt signed') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.JWT_INVALID_FORMAT.value) - - def testGetResponseTokenNoType(self): - try: - getResponseToken({'accessToken': 'only access token'}) - self.fail('Should throw') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.SERVER_ERROR.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_TOKEN_TYPE.value) - - def testGetResponseTokenNoAccessToken(self): - try: - getResponseToken({'tokenType': 'only token type'}) - self.fail('Should 
throw') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.SERVER_ERROR.value) - self.assertEqual( - se.message, SkyflowErrorMessages.MISSING_ACCESS_TOKEN.value) - - def testSendRequestInvalidUrl(self): - try: - sendRequestWithToken('invalidurl', 'invalid-token') - self.fail('Not throwing on invalid url') - except SkyflowError as se: - self.assertEqual(se.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - se.message, SkyflowErrorMessages.INVALID_URL.value % 'invalidurl') diff --git a/tests/service_account/test_sa_token_utils.py b/tests/service_account/test_sa_token_utils.py deleted file mode 100644 index 737439f2..00000000 --- a/tests/service_account/test_sa_token_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import os -import unittest -from dotenv import dotenv_values - -from skyflow.service_account._token import * -from skyflow.service_account import is_expired - - -class TestGenerateBearerToken(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join( - os.getcwd(), 'tests/service_account/data/') - return super().setUp() - - def testIsExpiredInvalidToken(self): - try: - token = 'invalid token' - self.assertEqual(True, is_expired(token)) - except SkyflowError as se: - self.fail('raised exception for invalid token') - - def testIsExpiredEmptyToken(self): - try: - self.assertEqual(True, is_expired('')) - except SkyflowError as se: - self.fail('Error '+str(se.message)) - - def testIsExpiredTokenExpred(self): - expiredToken = 
'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJodHRwczovL21hbmFnZS5za3lmbG93YXBpcy5jb20iLCJjbGkiOiJrOWZkN2ZiMzcyMDI0NDhiYmViOGNkNmUyYzQ4NTdkOSIsImV4cCI6MTY0NzI1NjM3NCwiaWF0IjoxNjQ3MjU2MzE1LCJpc3MiOiJzYS1hdXRoQG1hbmFnZS5za3lmbG93YXBpcy5jb20iLCJqdGkiOiJnYTMyZWJhMGJlMzQ0YWRmYjQxMzRjN2Y2ZTIzZjllMCIsInNjcCI6WyJyb2xlOnM1OTdjNzNjYjhjOTRlMjk4YzhlZjZjNzE0M2U0OWMyIl0sInN1YiI6InRlc3Qgc3ZjIGFjYyJ9.OrkSyNtXOVtfL3JNYaArlmUFg0txJFV6o3SE_wadPwZ_h1BtMuoKPo1LOAe-4HhS16i34HcfTTiHmg2ksx5KbD_sdx1intaDWZGXs-6TPvDK8mdFrBblp3nP1y1O_PHEnCMmPD3haZVMj_9jyTKPb6R8qBbMjr-UzXAUCCTiq9XqEd81wY8FsZeKwSQFqbdFdECaPsk8m-k8s7BKc_VLtHXdYXp4vNgjgleSeX4nHHhU1w0y18q2_tPwgLG-MZ2I7pF60Owk9T7f7gSuCpVfa6zYvpYiYFjQayFmYc6tJgEuOyGD_VFKKUUW4TszeNyJOCF15dPDO2JIeGh3xDJ8PA' - try: - self.assertEqual(True, is_expired(expiredToken)) - except SkyflowError: - self.fail('raised error for expired token') diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils/logger/__init__.py b/tests/utils/logger/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils/logger/test__log_helpers.py b/tests/utils/logger/test__log_helpers.py new file mode 100644 index 00000000..1ea50d45 --- /dev/null +++ b/tests/utils/logger/test__log_helpers.py @@ -0,0 +1,86 @@ +import unittest +from unittest.mock import Mock, patch + +from skyflow import LogLevel +from skyflow.utils.logger import log_info, log_error + + +class TestLoggingFunctions(unittest.TestCase): + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_info_with_logger(self, MockLogger): + mock_logger = MockLogger() + message = "Info message" + interface = "InterfaceA" + + log_info(message, mock_logger) + + mock_logger.info.assert_called_once_with(f"{message}") + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_info_without_logger(self, MockLogger): + try: + log_info("Message", None) + except AttributeError: + self.fail("log_info raised 
AttributeError unexpectedly!") + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_with_all_fields(self, MockLogger): + mock_logger = MockLogger() + message = "Error message" + http_code = 404 + grpc_code = 5 + http_status = "Not Found" + request_id = "12345" + details = {"info": "Detailed error information"} + + log_error(message, http_code, request_id, grpc_code, http_status, details, mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message, + 'grpc_code': grpc_code, + 'http_status': http_status, + 'request_id': request_id, + 'details': details + } + + mock_logger.error.assert_called_once_with(expected_log_data) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_with_minimal_fields(self, MockLogger): + mock_logger = MockLogger() + message = "Minimal error" + http_code = 400 + + log_error(message, http_code, logger=mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message + } + + mock_logger.error.assert_called_once_with(expected_log_data) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_creates_logger_if_none(self, MockLogger): + message = "Auto-created logger error" + http_code = 500 + + log_error(message, http_code) + + MockLogger.assert_called_once_with(LogLevel.ERROR) + + @patch('skyflow.utils.logger._log_helpers.Logger') + def test_log_error_handles_missing_optional_fields(self, MockLogger): + mock_logger = MockLogger() + message = "Test missing optional fields" + http_code = 503 + + log_error(message, http_code, logger=mock_logger) + + expected_log_data = { + 'http_code': http_code, + 'message': message + } + mock_logger.error.assert_called_once_with(expected_log_data) diff --git a/tests/utils/logger/test__logger.py b/tests/utils/logger/test__logger.py new file mode 100644 index 00000000..cdfcf13f --- /dev/null +++ b/tests/utils/logger/test__logger.py @@ -0,0 +1,101 @@ +import unittest +from unittest.mock import patch, Mock 
+import logging +from skyflow import LogLevel +from skyflow.utils.logger import Logger + + +class TestLogger(unittest.TestCase): + + @patch('logging.getLogger') + def test_logger_initialization_with_default_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger() + + self.assertEqual(logger.current_level, LogLevel.ERROR) + mock_logger_instance.setLevel.assert_called_once_with(logging.ERROR) + + @patch('logging.getLogger') + def test_logger_initialization_with_custom_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.INFO) + + self.assertEqual(logger.current_level, LogLevel.INFO) + mock_logger_instance.setLevel.assert_called_once_with(logging.INFO) + + @patch('logging.getLogger') + def test_set_log_level(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger() + logger.set_log_level(LogLevel.DEBUG) + + self.assertEqual(logger.current_level, LogLevel.DEBUG) + mock_logger_instance.setLevel.assert_called_with(logging.DEBUG) + + @patch('logging.getLogger') + def test_debug_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.DEBUG) + logger.debug("Debug message") + + mock_logger_instance.debug.assert_called_once_with("Debug message") + + @patch('logging.getLogger') + def test_info_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.INFO) + logger.info("Info message") + + mock_logger_instance.info.assert_called_once_with("Info message") + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_warn_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = 
mock_logger_instance + + logger = Logger(LogLevel.WARN) + logger.warn("Warn message") + + mock_logger_instance.warning.assert_called_once_with("Warn message") + mock_logger_instance.info.assert_not_called() + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_error_logging(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.ERROR) + logger.error("Error message") + + mock_logger_instance.error.assert_called_once_with("Error message") + mock_logger_instance.warning.assert_not_called() + mock_logger_instance.info.assert_not_called() + mock_logger_instance.debug.assert_not_called() + + @patch('logging.getLogger') + def test_logging_with_level_off(self, mock_get_logger): + mock_logger_instance = Mock() + mock_get_logger.return_value = mock_logger_instance + + logger = Logger(LogLevel.OFF) + logger.debug("Debug message") + logger.info("Info message") + logger.warn("Warn message") + logger.error("Error message") + + mock_logger_instance.debug.assert_not_called() + mock_logger_instance.info.assert_not_called() + mock_logger_instance.warning.assert_not_called() + mock_logger_instance.error.assert_not_called() \ No newline at end of file diff --git a/tests/utils/test__helpers.py b/tests/utils/test__helpers.py new file mode 100644 index 00000000..8b55abf3 --- /dev/null +++ b/tests/utils/test__helpers.py @@ -0,0 +1,38 @@ +import unittest +from skyflow.utils import get_base_url, format_scope + +VALID_URL = "https://example.com/path?query=1" +BASE_URL = "https://example.com" +EMPTY_URL = "" +INVALID_URL = "invalid-url" +SCOPES_LIST = ["admin", "user", "viewer"] +FORMATTED_SCOPES = "role:admin role:user role:viewer" + +class TestHelperFunctions(unittest.TestCase): + def test_get_base_url_valid_url(self): + self.assertEqual(get_base_url(VALID_URL), BASE_URL) + + def test_get_base_url_empty_url(self): + self.assertEqual(get_base_url(EMPTY_URL), "://") + + 
def test_get_base_url_invalid_url(self): + self.assertEqual(get_base_url(INVALID_URL), "://") + + def test_format_scope_valid_scopes(self): + self.assertEqual(format_scope(SCOPES_LIST), FORMATTED_SCOPES) + + def test_format_scope_empty_list(self): + self.assertIsNone(format_scope([])) + + def test_format_scope_none(self): + self.assertIsNone(format_scope(None)) + + def test_format_scope_single_scope(self): + single_scope = ["admin"] + expected_result = "role:admin" + self.assertEqual(format_scope(single_scope), expected_result) + + def test_format_scope_special_characters(self): + scopes_with_special_chars = ["admin", "user:write", "read-only"] + expected_result = "role:admin role:user:write role:read-only" + self.assertEqual(format_scope(scopes_with_special_chars), expected_result) \ No newline at end of file diff --git a/tests/utils/test__utils.py b/tests/utils/test__utils.py new file mode 100644 index 00000000..368d6367 --- /dev/null +++ b/tests/utils/test__utils.py @@ -0,0 +1,386 @@ +import unittest +from unittest.mock import patch, Mock +import os +import json +from requests import PreparedRequest +from requests.models import HTTPError +from skyflow.error import SkyflowError +from skyflow.utils import get_credentials, SkyflowMessages, get_vault_url, construct_invoke_connection_request, \ + parse_insert_response, parse_update_record_response, parse_delete_response, parse_get_response, \ + parse_detokenize_response, parse_tokenize_response, parse_query_response, parse_invoke_connection_response, \ + handle_exception, validate_api_key +from skyflow.utils._utils import parse_path_params, to_lowercase_keys, get_metrics +from skyflow.utils.enums import EnvUrls, Env, ContentType +from skyflow.vault.connection import InvokeConnectionResponse +from skyflow.vault.data import InsertResponse, DeleteResponse, GetResponse, QueryResponse +from skyflow.vault.tokens import DetokenizeResponse, TokenizeResponse + +creds_path = 
os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "credentials.json") +with open(creds_path, 'r') as file: + credentials = json.load(file) + +TEST_ERROR_MESSAGE = "Test error message." +VALID_ENV_CREDENTIALS = credentials + +class TestUtils(unittest.TestCase): + + @patch.dict(os.environ, {"SKYFLOW_CREDENTIALS": json.dumps(VALID_ENV_CREDENTIALS)}) + def test_get_credentials_env_variable(self): + credentials = get_credentials() + credentials_string = credentials.get('credentials_string') + self.assertEqual(credentials_string, json.dumps(VALID_ENV_CREDENTIALS).replace('\n', '\\n')) + + def test_get_credentials_with_config_level_creds(self): + test_creds = {"authToken": "test_token"} + creds = get_credentials(config_level_creds=test_creds) + self.assertEqual(creds, test_creds) + + def test_get_credentials_with_common_creds(self): + test_creds = {"authToken": "test_token"} + creds = get_credentials(common_skyflow_creds=test_creds) + self.assertEqual(creds, test_creds) + + def test_get_vault_url_valid(self): + valid_cluster_id = "testCluster" + valid_env = Env.DEV + valid_vault_id = "vault123" + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + expected_url = f"https://{valid_cluster_id}.vault.skyflowapis.dev" + self.assertEqual(url, expected_url) + + def test_get_vault_url_with_invalid_cluster_id(self): + valid_cluster_id = "" + valid_env = Env.DEV + valid_vault_id = "vault123" + with self.assertRaises(SkyflowError) as context: + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_CLUSTER_ID.value.format(valid_vault_id)) + + def test_get_vault_url_with_invalid_env(self): + valid_cluster_id = "cluster_id" + valid_env =EnvUrls.DEV + valid_vault_id = "vault123" + with self.assertRaises(SkyflowError) as context: + url = get_vault_url(valid_cluster_id, valid_env, valid_vault_id) + self.assertEqual(context.exception.message, 
SkyflowMessages.Error.INVALID_ENV.value.format(valid_vault_id)) + + def test_parse_path_params(self): + url = "https://example.com/{param1}/{param2}" + path_params = {"param1": "value1", "param2": "value2"} + parsed_url = parse_path_params(url, path_params) + self.assertEqual(parsed_url, "https://example.com/value1/value2") + + def test_to_lowercase_keys(self): + input_dict = {"Key1": "value1", "KEY2": "value2"} + expected_output = {"key1": "value1", "key2": "value2"} + self.assertEqual(to_lowercase_keys(input_dict), expected_output) + + def test_get_metrics(self): + metrics = get_metrics() + self.assertIn('sdk_name_version', metrics) + self.assertIn('sdk_client_device_model', metrics) + self.assertIn('sdk_client_os_details', metrics) + self.assertIn('sdk_runtime_details', metrics) + + + def test_construct_invoke_connection_request_valid(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.JSON.value} + mock_connection_request.body = {"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + expected_url = parse_path_params(connection_url, mock_connection_request.path_params) + "?query=test" + self.assertEqual(result.url, expected_url) + + self.assertEqual(result.method, "POST") + self.assertEqual(result.headers['Content-Type'], ContentType.JSON.value) + + self.assertEqual(result.body, json.dumps(mock_connection_request.body)) + + def test_construct_invoke_connection_request_with_invalid_headers(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = [] + mock_connection_request.body = 
{"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + with self.assertRaises(SkyflowError) as context: + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value) + + def test_construct_invoke_connection_request_with_invalid_request_body(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.JSON.value} + mock_connection_request.body = [] + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + with self.assertRaises(SkyflowError) as context: + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_BODY.value) + + def test_construct_invoke_connection_request_with_url_encoded_content_type(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers = {"Content-Type": ContentType.URLENCODED.value} + mock_connection_request.body = {"key": "value"} + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + def test_construct_invoke_connection_request_with_form_date_content_type(self): + mock_connection_request = Mock() + mock_connection_request.path_params = {"param1": "value1"} + mock_connection_request.headers 
= {"Content-Type": ContentType.FORMDATA.value} + mock_connection_request.body = { + "name": (None, "John Doe") + } + mock_connection_request.method.value = "POST" + mock_connection_request.query_params = {"query": "test"} + + connection_url = "https://example.com/{param1}/endpoint" + + result = construct_invoke_connection_request(mock_connection_request, connection_url, logger=None) + + self.assertIsInstance(result, PreparedRequest) + + def test_parse_insert_response(self): + api_response = Mock() + api_response.responses = [ + {"Status": 200, "Body": {"records": [{"skyflow_id": "id1"}]}}, + {"Status": 400, "Body": {"error": TEST_ERROR_MESSAGE}} + ] + result = parse_insert_response(api_response, continue_on_error=True) + self.assertEqual(len(result.inserted_fields), 1) + self.assertEqual(len(result.errors), 1) + + def test_parse_insert_response_continue_on_error_false(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(skyflow_id="id_1", tokens={"token1": "token_value1"}), + Mock(skyflow_id="id_2", tokens={"token2": "token_value2"}) + ] + + result = parse_insert_response(mock_api_response, continue_on_error=False) + + self.assertIsInstance(result, InsertResponse) + + expected_inserted_fields = [ + {"skyflow_id": "id_1", "token1": "token_value1"}, + {"skyflow_id": "id_2", "token2": "token_value2"} + ] + self.assertEqual(result.inserted_fields, expected_inserted_fields) + + self.assertEqual(result.errors, []) + + def test_parse_update_record_response(self): + api_response = Mock() + api_response.skyflow_id = "id1" + api_response.tokens = {"token1": "value1"} + result = parse_update_record_response(api_response) + self.assertEqual(result.updated_field['skyflow_id'], "id1") + self.assertEqual(result.updated_field['token1'], "value1") + + def test_parse_delete_response_successful(self): + mock_api_response = Mock() + mock_api_response.record_id_response = ["id_1", "id_2", "id_3"] + + result = parse_delete_response(mock_api_response) + + 
self.assertIsInstance(result, DeleteResponse) + + expected_deleted_ids = ["id_1", "id_2", "id_3"] + self.assertEqual(result.deleted_ids, expected_deleted_ids) + + self.assertEqual(result.errors, []) + + def test_parse_get_response_successful(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={'field1': 'value1', 'field2': 'value2'}), + Mock(fields={'field1': 'value3', 'field2': 'value4'}) + ] + + result = parse_get_response(mock_api_response) + + self.assertIsInstance(result, GetResponse) + + expected_data = [ + {'field1': 'value1', 'field2': 'value2'}, + {'field1': 'value3', 'field2': 'value4'} + ] + self.assertEqual(result.data, expected_data) + + self.assertEqual(result.errors, []) + + def test_parse_detokenize_response_with_mixed_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1", value="value1", value_type=Mock(value="Type1"), error=None), + Mock(token="token2", value=None, value_type=None, error="Some error"), + Mock(token="token3", value="value3", value_type=Mock(value="Type2"), error=None), + ] + + result = parse_detokenize_response(mock_api_response) + self.assertIsInstance(result, DetokenizeResponse) + + expected_detokenized_fields = [ + {"token": "token1", "value": "value1", "type": "Type1"}, + {"token": "token3", "value": "value3", "type": "Type2"} + ] + + expected_errors = [ + {"token": "token2", "error": "Some error"} + ] + + self.assertEqual(result.detokenized_fields, expected_detokenized_fields) + self.assertEqual(result.errors, expected_errors) + + def test_parse_tokenize_response_with_valid_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1"), + Mock(token="token2"), + Mock(token="token3"), + ] + + result = parse_tokenize_response(mock_api_response) + self.assertIsInstance(result, TokenizeResponse) + + expected_tokenized_fields = [ + {"token": "token1"}, + {"token": "token2"}, + {"token": "token3"} + ] + + 
self.assertEqual(result.tokenized_fields, expected_tokenized_fields) + + def test_parse_query_response_with_valid_records(self): + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + result = parse_query_response(mock_api_response) + + self.assertIsInstance(result, QueryResponse) + + expected_fields = [ + {"field1": "value1", "field2": "value2", "tokenized_data": {}}, + {"field1": "value3", "field2": "value4", "tokenized_data": {}} + ] + + self.assertEqual(result.fields, expected_fields) + + @patch("requests.Response") + def test_parse_invoke_connection_response_successful(self, mock_response): + mock_response.status_code = 200 + mock_response.content = json.dumps({"key": "value"}).encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + result = parse_invoke_connection_response(mock_response) + + self.assertIsInstance(result, InvokeConnectionResponse) + self.assertEqual(result.response["key"], "value") + self.assertEqual(result.response["request_id"], "1234") + + @patch("requests.Response") + def test_parse_invoke_connection_response_json_decode_error(self, mock_response): + + mock_response.status_code = 200 + mock_response.content = "Non-JSON Content".encode('utf-8') + + with self.assertRaises(SkyflowError) as context: + parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Non-JSON Content")) + + @patch("requests.Response") + def test_parse_invoke_connection_response_http_error_with_json_error_message(self, mock_response): + mock_response.status_code = 404 + mock_response.content = json.dumps({"error": {"message": "Not Found"}}).encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + mock_response.raise_for_status.side_effect = HTTPError("404 Error") + + with self.assertRaises(SkyflowError) as context: + 
parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, "Not Found - request id: 1234") + + @patch("requests.Response") + def test_parse_invoke_connection_response_http_error_without_json_error_message(self, mock_response): + mock_response.status_code = 500 + mock_response.content = "Internal Server Error".encode('utf-8') + mock_response.headers = {"x-request-id": "1234"} + + mock_response.raise_for_status.side_effect = HTTPError("500 Error") + + with self.assertRaises(SkyflowError) as context: + parse_invoke_connection_response(mock_response) + + self.assertEqual(context.exception.message, SkyflowMessages.Error.RESPONSE_NOT_JSON.value.format("Internal Server Error") + " - request id: 1234") + + @patch("skyflow.utils._utils.log_and_reject_error") + def test_handle_exception_json_error(self, mock_log_and_reject_error): + + mock_error = Mock() + mock_error.headers = { + 'x-request-id': '1234', + 'content-type': 'application/json' + } + mock_error.body = json.dumps({ + "error": { + "message": "JSON error occurred.", + "http_code": 400, + "http_status": "Bad Request", + "grpc_code": "8", + "details": "Detailed message" + } + }).encode('utf-8') + mock_logger = Mock() + + handle_exception(mock_error, mock_logger) + + mock_log_and_reject_error.assert_called_once_with( + "JSON error occurred.", + 400, + "1234", + "Bad Request", + "8", + "Detailed message", + logger=mock_logger + ) + + def test_validate_api_key_valid_key(self): + valid_key = "sky-ABCDE-1234567890abcdef1234567890abcdef" + self.assertTrue(validate_api_key(valid_key)) + + def test_validate_api_key_invalid_length(self): + invalid_key = "sky-ABCDE-123" + self.assertFalse(validate_api_key(invalid_key)) + + def test_validate_api_key_invalid_pattern(self): + invalid_key = "sky-ABCDE-1234567890GHIJKL7890abcdef" + self.assertFalse(validate_api_key(invalid_key)) diff --git a/tests/vault/__init__.py b/tests/vault/__init__.py index d803f19f..e69de29b 100644 --- 
a/tests/vault/__init__.py +++ b/tests/vault/__init__.py @@ -1,3 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' diff --git a/tests/vault/client/__init__.py b/tests/vault/client/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/vault/client/test__client.py b/tests/vault/client/test__client.py new file mode 100644 index 00000000..cc2e2d42 --- /dev/null +++ b/tests/vault/client/test__client.py @@ -0,0 +1,105 @@ +import unittest +from unittest.mock import patch, MagicMock +from skyflow.generated.rest import Configuration +from skyflow.vault.client.client import VaultClient + +CONFIG = { + "credentials": "some_credentials", + "cluster_id": "test_cluster_id", + "env": "test_env", + "vault_id": "test_vault_id", + "roles": ["role_id_1", "role_id_2"], + "ctx": "context" +} + +CREDENTIALS_WITH_API_KEY = {"api_key": "dummy_api_key"} + +class TestVaultClient(unittest.TestCase): + def setUp(self): + self.vault_client = VaultClient(CONFIG) + + def test_set_common_skyflow_credentials(self): + credentials = {"api_key": "dummy_api_key"} + self.vault_client.set_common_skyflow_credentials(credentials) + self.assertEqual(self.vault_client.get_common_skyflow_credentials(), credentials) + + def test_set_logger(self): + mock_logger = MagicMock() + self.vault_client.set_logger("INFO", mock_logger) + self.assertEqual(self.vault_client.get_log_level(), "INFO") + self.assertEqual(self.vault_client.get_logger(), mock_logger) + + @patch("skyflow.vault.client.client.get_credentials") + @patch("skyflow.vault.client.client.get_vault_url") + @patch("skyflow.vault.client.client.Configuration") + @patch("skyflow.vault.client.client.VaultClient.initialize_api_client") + def test_initialize_client_configuration(self, mock_init_api_client, mock_config, mock_get_vault_url, + mock_get_credentials): + mock_get_credentials.return_value = (CREDENTIALS_WITH_API_KEY) + mock_get_vault_url.return_value = "https://test-vault-url.com" + + 
self.vault_client.initialize_client_configuration() + + mock_get_credentials.assert_called_once_with(CONFIG["credentials"], None, logger=None) + mock_get_vault_url.assert_called_once_with(CONFIG["cluster_id"], CONFIG["env"], CONFIG["vault_id"], logger=None) + mock_config.assert_called_once_with(host="https://test-vault-url.com", access_token="dummy_api_key") + mock_init_api_client.assert_called_once() + + @patch("skyflow.vault.client.client.ApiClient") + def test_initialize_api_client(self, mock_api_client): + config = Configuration() + self.vault_client.initialize_api_client(config) + mock_api_client.assert_called_once_with(config) + + @patch("skyflow.vault.client.client.RecordsApi") + def test_get_records_api(self, mock_records_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_records_api() + mock_records_api.assert_called_once() + + @patch("skyflow.vault.client.client.TokensApi") + def test_get_tokens_api(self, mock_tokens_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_tokens_api() + mock_tokens_api.assert_called_once() + + @patch("skyflow.vault.client.client.QueryApi") + def test_get_query_api(self, mock_query_api): + self.vault_client.initialize_api_client(Configuration()) + self.vault_client.get_query_api() + mock_query_api.assert_called_once() + + def test_get_vault_id(self): + self.assertEqual(self.vault_client.get_vault_id(), CONFIG["vault_id"]) + + @patch("skyflow.vault.client.client.generate_bearer_token") + @patch("skyflow.vault.client.client.generate_bearer_token_from_creds") + @patch("skyflow.vault.client.client.log_info") + def test_get_bearer_token_with_api_key(self, mock_log_info, mock_generate_bearer_token, + mock_generate_bearer_token_from_creds): + token = self.vault_client.get_bearer_token(CREDENTIALS_WITH_API_KEY) + self.assertEqual(token, CREDENTIALS_WITH_API_KEY["api_key"]) + + def test_update_config(self): + new_config = {"credentials": "new_credentials"} + 
self.vault_client.update_config(new_config) + self.assertTrue(self.vault_client._VaultClient__is_config_updated) + self.assertEqual(self.vault_client.get_config()["credentials"], "new_credentials") + + def test_get_config(self): + self.assertEqual(self.vault_client.get_config(), CONFIG) + + def test_get_common_skyflow_credentials(self): + credentials = {"api_key": "dummy_api_key"} + self.vault_client.set_common_skyflow_credentials(credentials) + self.assertEqual(self.vault_client.get_common_skyflow_credentials(), credentials) + + def test_get_log_level(self): + log_level = "DEBUG" + self.vault_client.set_logger(log_level, MagicMock()) + self.assertEqual(self.vault_client.get_log_level(), log_level) + + def test_get_logger(self): + mock_logger = MagicMock() + self.vault_client.set_logger("INFO", mock_logger) + self.assertEqual(self.vault_client.get_logger(), mock_logger) \ No newline at end of file diff --git a/tests/vault/controller/__init__.py b/tests/vault/controller/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/vault/controller/test__connection.py b/tests/vault/controller/test__connection.py new file mode 100644 index 00000000..0bd3d293 --- /dev/null +++ b/tests/vault/controller/test__connection.py @@ -0,0 +1,104 @@ +import unittest +from unittest.mock import Mock, patch + +from skyflow.error import SkyflowError +from skyflow.utils import SkyflowMessages +from skyflow.utils.enums import RequestMethod +from skyflow.vault.connection import InvokeConnectionRequest +from skyflow.vault.controller import Connection + +VALID_BEARER_TOKEN = "test_bearer_token" +VAULT_CONFIG = { + "credentials": {"api_key": "test_api_key"}, + "connection_url": "https://CONNECTION_URL" +} +SUCCESS_STATUS_CODE = 200 +SUCCESS_RESPONSE_CONTENT = '{"response": "success"}' +VALID_BODY = {"key": "value"} +VALID_PATH_PARAMS = {"path_key": "value"} +VALID_HEADERS = {"Content-Type": "application/json"} +VALID_QUERY_PARAMS = {"query_key": "value"} +INVALID_HEADERS = 
"invalid_headers" +INVALID_BODY = "invalid_body" +FAILURE_STATUS_CODE = 400 +ERROR_RESPONSE_CONTENT = '{"error": {"message": "error occurred"}}' + +class TestConnection(unittest.TestCase): + def setUp(self): + self.mock_vault_client = Mock() + self.mock_vault_client.get_config.return_value = VAULT_CONFIG + self.mock_vault_client.get_bearer_token.return_value = VALID_BEARER_TOKEN + self.connection = Connection(self.mock_vault_client) + + @patch('requests.Session.send') + def test_invoke_success(self, mock_send): + # Mocking successful response + mock_response = Mock() + mock_response.status_code = SUCCESS_STATUS_CODE + mock_response.content = SUCCESS_RESPONSE_CONTENT + mock_response.headers = {'x-request-id': 'test-request-id'} + mock_send.return_value = mock_response + + request = InvokeConnectionRequest( + method=RequestMethod.POST, + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + # Test invoke method + response = self.connection.invoke(request) + + # Assertions for successful invocation + self.assertEqual(response.response, {"response": "success", "request_id": "test-request-id"}) + self.mock_vault_client.get_bearer_token.assert_called_once() + + @patch('requests.Session.send') + def test_invoke_invalid_headers(self, mock_send): + request = InvokeConnectionRequest( + method="POST", + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=INVALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_HEADERS.value) + + @patch('requests.Session.send') + def test_invoke_invalid_body(self, mock_send): + request = InvokeConnectionRequest( + method="POST", + body=INVALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + 
self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVALID_REQUEST_BODY.value) + + @patch('requests.Session.send') + def test_invoke_request_error(self, mock_send): + mock_response = Mock() + mock_response.status_code = FAILURE_STATUS_CODE + mock_response.content = ERROR_RESPONSE_CONTENT + mock_send.return_value = mock_response + + request = InvokeConnectionRequest( + method=RequestMethod.POST, + body=VALID_BODY, + path_params=VALID_PATH_PARAMS, + headers=VALID_HEADERS, + query_params=VALID_QUERY_PARAMS + ) + + with self.assertRaises(SkyflowError) as context: + self.connection.invoke(request) + self.assertEqual(context.exception.message, SkyflowMessages.Error.INVOKE_CONNECTION_FAILED.value) + + diff --git a/tests/vault/controller/test__vault.py b/tests/vault/controller/test__vault.py new file mode 100644 index 00000000..933e713e --- /dev/null +++ b/tests/vault/controller/test__vault.py @@ -0,0 +1,454 @@ +import unittest +from unittest.mock import Mock, patch + +from skyflow.generated.rest import RecordServiceBatchOperationBody, V1BatchRecord, RecordServiceInsertRecordBody, \ + V1FieldRecords, RecordServiceUpdateRecordBody, RecordServiceBulkDeleteRecordBody, QueryServiceExecuteQueryBody, \ + V1DetokenizeRecordRequest, V1DetokenizePayload, V1TokenizePayload, V1TokenizeRecordRequest, RedactionEnumREDACTION +from skyflow.utils.enums import RedactionType, TokenMode +from skyflow.vault.controller import Vault +from skyflow.vault.data import InsertRequest, InsertResponse, UpdateResponse, UpdateRequest, DeleteResponse, \ + DeleteRequest, GetRequest, GetResponse, QueryRequest, QueryResponse +from skyflow.vault.tokens import DetokenizeRequest, DetokenizeResponse, TokenizeResponse, TokenizeRequest + +VAULT_ID = "test_vault_id" +TABLE_NAME = "test_table" + +class TestVault(unittest.TestCase): + + def setUp(self): + # Mock vault client + self.vault_client = Mock() + self.vault_client.get_vault_id.return_value = VAULT_ID + 
self.vault_client.get_logger.return_value = Mock() + + # Create a Vault instance with the mock client + self.vault = Vault(self.vault_client) + + @patch("skyflow.vault.controller._vault.validate_insert_request") + @patch("skyflow.vault.controller._vault.parse_insert_response") + def test_insert_with_continue_on_error(self, mock_parse_response, mock_validate): + """Test insert functionality when continue_on_error is True.""" + + # Mock request + request = InsertRequest( + table_name=TABLE_NAME, + values=[{"field": "value"}], + tokens=None, + return_tokens=True, + upsert='column_name', + continue_on_error=True + ) + + expected_body = RecordServiceBatchOperationBody( + records=[ + V1BatchRecord( + fields={"field": "value"}, + table_name=TABLE_NAME, + method="POST", + tokenization=True, + upsert="column_name" + ) + ], + continue_on_error=True, + byot="DISABLE" + ) + + # Mock API response to contain a mix of successful and failed insertions + mock_api_response = Mock() + mock_api_response.responses = [ + {"Status": 200, "Body": {"records": [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}]}}, + {"Status": 400, "Body": {"error": "Insert error for record 2"}} + ] + + # Expected parsed response + expected_inserted_fields = [ + {'skyflow_id': 'id1', 'request_index': 0, 'token_field': 'token_val1'} + ] + expected_errors = [ + {'request_index': 1, 'error': 'Insert error for record 2'} + ] + expected_response = InsertResponse(inserted_fields=expected_inserted_fields, errors=expected_errors) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_batch_operation.return_value = mock_api_response + + # Call the insert function + result = self.vault.insert(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + 
records_api.record_service_batch_operation.assert_called_once_with(VAULT_ID, expected_body) + mock_parse_response.assert_called_once_with(mock_api_response, True) + + # Assert that the result matches the expected InsertResponse + self.assertEqual(result.inserted_fields, expected_inserted_fields) + self.assertEqual(result.errors, expected_errors) + + @patch("skyflow.vault.controller._vault.validate_insert_request") + @patch("skyflow.vault.controller._vault.parse_insert_response") + def test_insert_with_continue_on_error_false(self, mock_parse_response, mock_validate): + """Test insert functionality when continue_on_error is False, ensuring a single bulk insert.""" + + # Mock request with continue_on_error set to False + request = InsertRequest( + table_name=TABLE_NAME, + values=[{"field": "value"}], + tokens=None, + return_tokens=True, + upsert=None, + homogeneous=True, + continue_on_error=False + ) + + # Expected API request body based on InsertRequest parameters + expected_body = RecordServiceInsertRecordBody( + records=[ + V1FieldRecords(fields={"field": "value"}) + ], + tokenization=True, + upsert=None, + homogeneous=True + ) + + # Mock API response for a successful insert + mock_api_response = Mock() + mock_api_response.records = [{"skyflow_id": "id1", "tokens": {"token_field": "token_val1"}}] + + # Expected parsed response + expected_inserted_fields = [{'skyflow_id': 'id1', 'token_field': 'token_val1'}] + expected_response = InsertResponse(inserted_fields=expected_inserted_fields) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_insert_record.return_value = mock_api_response + + # Call the insert function + result = self.vault.insert(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_insert_record.assert_called_once_with(VAULT_ID, 
TABLE_NAME, + expected_body) + mock_parse_response.assert_called_once_with(mock_api_response, False) + + # Assert that the result matches the expected InsertResponse + self.assertEqual(result.inserted_fields, expected_inserted_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_update_request") + @patch("skyflow.vault.controller._vault.parse_update_record_response") + def test_update_successful(self, mock_parse_response, mock_validate): + """Test update functionality for a successful update request.""" + + # Mock request + request = UpdateRequest( + table=TABLE_NAME, + data={"skyflow_id": "12345", "field": "new_value"}, + tokens=None, + return_tokens=True, + token_mode=TokenMode.DISABLE + ) + + # Expected payload + expected_payload = RecordServiceUpdateRecordBody( + record=V1FieldRecords( + fields={"field": "new_value"}, + tokens=request.tokens + ), + tokenization=request.return_tokens, + byot=request.token_mode.value + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.skyflow_id = "12345" + mock_api_response.tokens = {"token_field": "token_value"} + + # Expected parsed response + expected_updated_field = {'skyflow_id': "12345", 'token_field': "token_value"} + expected_response = UpdateResponse(updated_field=expected_updated_field) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_update_record.return_value = mock_api_response + + # Call the update function + result = self.vault.update(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_update_record.assert_called_once_with( + VAULT_ID, + request.table, + request.data["skyflow_id"], + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result 
matches the expected UpdateResponse + self.assertEqual(result.updated_field, expected_updated_field) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_delete_request") + @patch("skyflow.vault.controller._vault.parse_delete_response") + def test_delete_successful(self, mock_parse_response, mock_validate): + """Test delete functionality for a successful delete request.""" + + # Mock request + request = DeleteRequest( + table=TABLE_NAME, + ids=["12345", "67890"] + ) + + # Expected payload + expected_payload = RecordServiceBulkDeleteRecordBody(skyflow_ids=request.ids) + + # Mock API response + mock_api_response = Mock() + mock_api_response.record_id_response = ["12345", "67890"] + + # Expected parsed response + expected_deleted_ids = ["12345", "67890"] + expected_response = DeleteResponse(deleted_ids=expected_deleted_ids, errors=[]) + + # Set the return value for the parse response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_delete_record.return_value = mock_api_response + + # Call the delete function + result = self.vault.delete(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_bulk_delete_record.assert_called_once_with( + VAULT_ID, + request.table, + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected DeleteResponse + self.assertEqual(result.deleted_ids, expected_deleted_ids) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_get_request") + @patch("skyflow.vault.controller._vault.parse_get_response") + def test_get_successful(self, mock_parse_response, mock_validate): + """Test get functionality for a successful get request.""" + + # Mock request + request = GetRequest( + 
table=TABLE_NAME, + ids=["12345", "67890"], + redaction_type=RedactionType.PLAIN_TEXT, + return_tokens=True, + fields=["field1", "field2"], + offset="0", + limit="10", + download_url=True + ) + + # Expected payload + expected_payload = { + "object_name": request.table, + "skyflow_ids": request.ids, + "redaction": request.redaction_type.value, + "tokenization": request.return_tokens, + "fields": request.fields, + "offset": request.offset, + "limit": request.limit, + "download_url": request.download_url, + "column_name": request.column_name, + "column_values": request.column_values + } + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + # Expected parsed response + expected_data = [ + {"field1": "value1", "field2": "value2"}, + {"field1": "value3", "field2": "value4"} + ] + expected_response = GetResponse(data=expected_data, errors=[]) + + # Set the return value for parse_get_response + mock_parse_response.return_value = expected_response + records_api = self.vault_client.get_records_api.return_value + records_api.record_service_bulk_get_record.return_value = mock_api_response + + # Call the get function + result = self.vault.get(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + records_api.record_service_bulk_get_record.assert_called_once_with( + VAULT_ID, + **expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected GetResponse + self.assertEqual(result.data, expected_data) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_query_request") + @patch("skyflow.vault.controller._vault.parse_query_response") + def test_query_successful(self, mock_parse_response, mock_validate): + """Test query functionality for a successful 
query request.""" + + # Mock request + request = QueryRequest(query="SELECT * FROM test_table") + + # Expected payload as a QueryServiceExecuteQueryBody instance + expected_payload = QueryServiceExecuteQueryBody(query=request.query) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(fields={"field1": "value1", "field2": "value2"}), + Mock(fields={"field1": "value3", "field2": "value4"}) + ] + + # Expected parsed response + expected_fields = [ + {"field1": "value1", "field2": "value2", "tokenized_data": {}}, + {"field1": "value3", "field2": "value4", "tokenized_data": {}} + ] + expected_response = QueryResponse() + expected_response.fields = expected_fields + + # Set the return value for parse_query_response + mock_parse_response.return_value = expected_response + query_api = self.vault_client.get_query_api.return_value + query_api.query_service_execute_query.return_value = mock_api_response + + # Call the query function + result = self.vault.query(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + query_api.query_service_execute_query.assert_called_once_with( + VAULT_ID, + expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected QueryResponse + self.assertEqual(result.fields, expected_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_detokenize_request") + @patch("skyflow.vault.controller._vault.parse_detokenize_response") + def test_detokenize_successful(self, mock_parse_response, mock_validate): + request = DetokenizeRequest( + tokens=["token1", "token2"], + redaction_type=RedactionType.PLAIN_TEXT, + continue_on_error=False + ) + + # Expected payload as a V1DetokenizePayload instance + tokens_list = [ + V1DetokenizeRecordRequest(token="token1", redaction=RedactionEnumREDACTION.PLAIN_TEXT), + 
V1DetokenizeRecordRequest(token="token2", redaction=RedactionEnumREDACTION.PLAIN_TEXT) + ] + expected_payload = V1DetokenizePayload( + detokenization_parameters=tokens_list, + continue_on_error=request.continue_on_error + ) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1", value="value1", value_type=Mock(value="STRING"), error=None), + Mock(token="token2", value="value2", value_type=Mock(value="STRING"), error=None) + ] + + # Expected parsed response + expected_fields = [ + {"token": "token1", "value": "value1", "type": "STRING"}, + {"token": "token2", "value": "value2", "type": "STRING"} + ] + expected_response = DetokenizeResponse(detokenized_fields=expected_fields, errors=[]) + + # Set the return value for parse_detokenize_response + mock_parse_response.return_value = expected_response + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_detokenize.return_value = mock_api_response + + # Call the detokenize function + result = self.vault.detokenize(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + tokens_api.record_service_detokenize.assert_called_once_with( + VAULT_ID, + detokenize_payload=expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected DetokenizeResponse + self.assertEqual(result.detokenized_fields, expected_fields) + self.assertEqual(result.errors, []) # No errors expected + + @patch("skyflow.vault.controller._vault.validate_tokenize_request") + @patch("skyflow.vault.controller._vault.parse_tokenize_response") + def test_tokenize_successful(self, mock_parse_response, mock_validate): + """Test tokenize functionality for a successful tokenize request.""" + + # Mock request with tokenization parameters + request = TokenizeRequest( + values=[ + {"value": "value1", "column_group": "group1"}, + {"value": "value2", "column_group": 
"group2"} + ] + ) + + # Expected payload as a V1TokenizePayload instance + records_list = [ + V1TokenizeRecordRequest(value="value1", column_group="group1"), + V1TokenizeRecordRequest(value="value2", column_group="group2") + ] + expected_payload = V1TokenizePayload(tokenization_parameters=records_list) + + # Mock API response + mock_api_response = Mock() + mock_api_response.records = [ + Mock(token="token1"), + Mock(token="token2") + ] + + # Expected parsed response + expected_fields = [ + {"token": "token1"}, + {"token": "token2"} + ] + expected_response = TokenizeResponse(tokenized_fields=expected_fields) + + # Set the return value for parse_tokenize_response + mock_parse_response.return_value = expected_response + tokens_api = self.vault_client.get_tokens_api.return_value + tokens_api.record_service_tokenize.return_value = mock_api_response + + # Call the tokenize function + result = self.vault.tokenize(request) + + # Assertions + mock_validate.assert_called_once_with(self.vault_client.get_logger(), request) + tokens_api.record_service_tokenize.assert_called_once_with( + VAULT_ID, + tokenize_payload=expected_payload + ) + mock_parse_response.assert_called_once_with(mock_api_response) + + # Check that the result matches the expected TokenizeResponse + self.assertEqual(result.tokenized_fields, expected_fields) \ No newline at end of file diff --git a/tests/vault/test_client_init.py b/tests/vault/test_client_init.py deleted file mode 100644 index 3cd670f9..00000000 --- a/tests/vault/test_client_init.py +++ /dev/null @@ -1,42 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import logging -import unittest - - -from skyflow.vault._config import * -from skyflow.vault._client import Client -from skyflow.errors._skyflow_errors import * -from skyflow import set_log_level, LogLevel - - -class TestConfig(unittest.TestCase): - - def testClientInitInvalidVaultURL(self): - config = Configuration('VAULT ID', 22, lambda: 'token') - - try: - client = Client(config) - self.fail('Should fail due to invalid VAULT URL') - except SkyflowError as e: - self.assertEqual(SkyflowErrorCodes.INVALID_INPUT.value, e.code) - self.assertEqual( - SkyflowErrorMessages.VAULT_URL_INVALID_TYPE.value % (type(22)), e.message) - - def testClientInitInvalidTokenProvider(self): - config = Configuration('VAULT ID', 'VAULT URL', 'token') - - try: - client = Client(config) - self.fail('Should fail due to invalid TOKEN PROVIDER') - except SkyflowError as e: - self.assertEqual(SkyflowErrorCodes.INVALID_INPUT.value, e.code) - self.assertEqual(SkyflowErrorMessages.TOKEN_PROVIDER_ERROR.value % ( - type('token')), e.message) - - def testLogLevel(self): - skyflowLogger = logging.getLogger('skyflow') - self.assertEqual(skyflowLogger.getEffectiveLevel(), logging.ERROR) - set_log_level(logLevel=LogLevel.DEBUG) - self.assertEqual(skyflowLogger.level, logging.DEBUG) diff --git a/tests/vault/test_config.py b/tests/vault/test_config.py deleted file mode 100644 index 7c4f5ced..00000000 --- a/tests/vault/test_config.py +++ /dev/null @@ -1,77 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -from http import client -import unittest - -from skyflow.vault._config import * -from skyflow.vault import Client -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages - - -class TestConfig(unittest.TestCase): - def testInsertOptions(self): - defaultOptions = InsertOptions() - noTokensOption = InsertOptions(tokens=False) - - self.assertEqual(defaultOptions.tokens, True) - self.assertEqual(noTokensOption.tokens, False) - - def testSkyflowConfig(self): - myconfig = Configuration( - "vaultID", "https://vaults.skyflow.com", lambda: "token") - self.assertEqual(myconfig.vaultID, "vaultID") - self.assertEqual(myconfig.vaultURL, "https://vaults.skyflow.com") - self.assertEqual(myconfig.tokenProvider(), "token") - - def testConnectionConfigDefaults(self): - config = ConnectionConfig( - 'https://skyflow.com', methodName=RequestMethod.GET) - self.assertEqual(config.connectionURL, 'https://skyflow.com') - self.assertEqual(config.methodName, RequestMethod.GET) - self.assertDictEqual(config.pathParams, {}) - self.assertDictEqual(config.queryParams, {}) - self.assertDictEqual(config.requestHeader, {}) - self.assertDictEqual(config.requestBody, {}) - - def testConfigArgs(self): - configOnlyTokenProvider = Configuration(lambda: "token") - self.assertIsNotNone(configOnlyTokenProvider.tokenProvider) - self.assertEqual(configOnlyTokenProvider.vaultID, '') - self.assertEqual(configOnlyTokenProvider.vaultURL, '') - - try: - Configuration() - except TypeError as e: - self.assertEqual(e.args[0], "tokenProvider must be given") - - def testConfigInvalidIdType(self): - try: - config = Configuration( - ['invalid'], 'www.example.org', lambda: 'token') - Client(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.VAULT_ID_INVALID_TYPE.value % type(['invalid'])) - - def testCheckConfigEmptyVaultId(self): - try: - config = 
Configuration('', '', lambda: 'token') - Client(config)._checkConfig('test') - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_VAULT_ID.value) - - def testCheckConfigEmptyVaultURL(self): - try: - config = Configuration('vault_id', '', lambda: 'token') - Client(config)._checkConfig('test') - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_VAULT_URL.value) diff --git a/tests/vault/test_delete.py b/tests/vault/test_delete.py deleted file mode 100644 index fd6e7c3d..00000000 --- a/tests/vault/test_delete.py +++ /dev/null @@ -1,235 +0,0 @@ -import json -import unittest -import os - -import asyncio -import warnings -from unittest import mock -from unittest.mock import patch, MagicMock - -import requests -from requests import HTTPError -from requests.models import Response -from dotenv import dotenv_values - -from skyflow.errors import SkyflowError, SkyflowErrorCodes -from skyflow.errors._skyflow_errors import SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, DeleteOptions -from skyflow.vault._delete import deleteProcessResponse - - -class TestDelete(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - "12345", "demo", tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - - self.record_id = "123" - - 
self.mockResponse = { - "responses": [ - { - "records": [ - { - "skyflow_id": self.record_id, - "deleted": True - } - ] - } - ] - } - self.DeleteOptions = DeleteOptions(tokens=False) - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testDeleteInvalidRecordsType(self): - invalidData = {"records": "invalid"} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testDeleteMissingdata(self): - invalid_data = {} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDeleteEmptyRecords(self): - invalid_data = {"records": []} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.EMPTY_RECORDS_IN_DELETE.value) - - def testDeleteMissingRecordsKey(self): - invalid_data = {"some_other_key": "value"} - with self.assertRaises(SkyflowError) as context: - self.client.delete(invalid_data) - self.assertEqual(context.exception.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(context.exception.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDeleteNoIds(self): - invalidData = {"records": [{"invalid": "invalid", "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - 
def testDeleteInvalidIdType(self): - invalidData = {"records": [{"id": ["invalid"], "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (list)) - - def testDeleteInvalidIdType2(self): - invalidData = {"records": [{"id": 123, "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testDeleteEmptyId(self): - invalidData = {"records": [{"id": "", "table": "stripe"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.EMPTY_ID_IN_DELETE.value) - - def testDeleteNoTable(self): - invalidData = {"records": [{"id": "id1", "invalid": "invalid"}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testDeleteInvalidTableType(self): - invalidData = {"records": [{"id": "id1", "table": ["invalid"]}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testDeleteEmptyTable(self): - invalidData = {"records": [{"id": "123", "table": ""}]} - try: - self.client.delete(invalidData) - self.fail('Should have thrown an error') - except 
SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.EMPTY_TABLE_IN_DELETE.value) - - def testDeleteProcessResponseWithSuccessfulResponse(self): - mock_response = requests.Response() - mock_response.status_code = 200 - mock_response._content = b'{"key": "value"}' - partial, result = deleteProcessResponse(mock_response) - self.assertFalse(partial) - self.assertIsInstance(result, dict) - self.assertEqual(result, {"key": "value"}) - - def testDeleteProcessResponseWithNoContentResponse(self): - mock_response = requests.Response() - mock_response.status_code = 204 - result = deleteProcessResponse(mock_response) - self.assertIsNone(result) - - def test_http_error_with_error_message(self): - error_response = { - 'code': 400, - 'description': 'Error occurred' - } - response = mock.Mock(spec=requests.Response, status_code=400, - content=json.dumps(error_response).encode()) - partial, error = deleteProcessResponse(response) - self.assertFalse(partial) - self.assertEqual(error, { - "code": 400, - "description": "Error occurred", - }) - - def test_delete_data_with_errors(self): - response = mock.Mock(spec=requests.Response) - response.status_code = 404 - response.content = b'{"code": 404, "description": "Not found"}' - with mock.patch('requests.delete', return_value=response): - records = {"records": [ - {"id": "id1", "table": "stripe"}, - ]} - result = self.client.delete(records) - - self.assertIn('errors', result) - error = result['errors'][0] - self.assertEqual(error['id'], "id1") - self.assertEqual(error['error'], {'code': 404, 'description': 'Not found'}) - - def testDeleteProcessInvalidResponse(self): - response = Response() - response.status_code = 500 - response._content = b"Invalid Request" - try: - deleteProcessResponse(response) - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % - 
response.content.decode('utf-8')) - - def test_delete_process_response_with_error(self): - mock_response = mock.Mock(spec=requests.Response) - mock_response.status_code = 404 - mock_response.content = b'{"error": {"message": "Not found"}}' - mock_response.headers = {'x-request-id': 'request-id-123'} - partial, error = deleteProcessResponse(mock_response) - self.assertFalse(partial) - self.assertEqual(error, {"error": {"message": "Not found"}}) - - def test_delete_process_response_response_not_json(self): - mock_response = mock.Mock(spec=requests.Response) - mock_response.status_code = 500 - mock_response.content = b'Not a valid JSON response' - - with self.assertRaises(SkyflowError) as cm: - deleteProcessResponse(mock_response) - - exception = cm.exception - self.assertEqual(exception.code, 500) - self.assertIn("Not a valid JSON response", str(exception)) diff --git a/tests/vault/test_detokenize.py b/tests/vault/test_detokenize.py deleted file mode 100644 index 28ee8bed..00000000 --- a/tests/vault/test_detokenize.py +++ /dev/null @@ -1,265 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os -from skyflow.vault._detokenize import getDetokenizeRequestBody, createDetokenizeResponseBody, getBulkDetokenizeRequestBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault._client import Client, Configuration -from skyflow.service_account import generate_bearer_token -from skyflow.vault._config import DetokenizeOptions, RedactionType -from dotenv import dotenv_values -import warnings - -import json -import asyncio - - -class TestDetokenize(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.testToken = self.envValues["DETOKENIZE_TEST_TOKEN"] - self.tokenField = { - "token": self.envValues["DETOKENIZE_TEST_TOKEN"] - } - self.data = {"records": [self.tokenField]} - self.mocked_futures = [] - self.event_loop = asyncio.new_event_loop() - - def tokenProvider(): - token, _ = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result((json.dumps(response).encode(), statusCode)) - else: - future.set_result((response, statusCode)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetDetokenizeRequestBodyWithValidBody(self): - body = getDetokenizeRequestBody(self.tokenField) - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "PLAIN_TEXT" - }] - } - - self.assertEqual(body, expectedOutput) - - def 
testDetokenizeNoRecords(self): - invalidData = {"invalidKey": self.tokenField} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testDetokenizeRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testDetokenizeNoToken(self): - invalidData = {"records": [{"invalid": "invalid"}]} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_KEY_ERROR.value) - - def testDetokenizeTokenInvalidType(self): - invalidData = {"records": [{"token": ["invalid"]}]} - try: - self.client.detokenize(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TOKEN_TYPE.value % (list)) - - def testDetokenizeRedactionInvalidType(self): - invalidData = {"records": [{"token": "valid", "redaction": 'demo'}]} - try: - self.client.detokenize(invalidData) - except SkyflowError as error: - self.assertTrue(error) - self.assertEqual(error.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(error.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % str(type("demo"))) - - def testResponseBodySuccess(self): - response = {"records": [{"token": "abc", "value": "secret"}]} - self.add_mock_response(response, 200) - res, partial = 
createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - self.assertEqual(partial, False) - self.assertIn("records", res) - self.assertNotIn("errors", res) - self.assertEqual(len(res["records"]), 1) - self.assertEqual(res, {"records": response["records"]}) - - def testResponseBodyPartialSuccess(self): - success_response = {"records": [{"token": "abc", "value": "secret"}]} - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(success_response, 200) - self.add_mock_response(error_response, 404) - - detokenizeRecords = {"records": [self.tokenField, self.tokenField]} - - res, partial = createDetokenizeResponseBody(detokenizeRecords, self.mocked_futures, DetokenizeOptions()) - self.assertTrue(partial) - - records = res["records"] - self.assertIsNotNone(records) - self.assertEqual(len(records), 1) - self.assertEqual(records, success_response["records"]) - - errors = res["errors"] - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseBodyFailure(self): - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(error_response, 404) - - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - self.assertFalse(partial) - - self.assertNotIn("records", res) - errors = res["errors"] - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseBodySuccessWithContinueOnErrorAsFalse(self): - response = { - "records": [ - {"token": "abc", "value": "secret1"}, - {"token": "def", "value": "secret2"} - ] - } - 
self.add_mock_response(response, 200) - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions(False)) - self.assertEqual(partial, False) - self.assertIn("records", res) - self.assertNotIn("errors", res) - self.assertEqual(len(res["records"]), 2) - self.assertEqual(res, {"records": response["records"]}) - - def testResponseBodyFailureWithContinueOnErrorAsFalse(self): - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(error_response, 404) - - res, partial = createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions(False)) - self.assertFalse(partial) - - self.assertNotIn("records", res) - errors = res["errors"] - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], error_response["error"]["http_code"]) - self.assertEqual(errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseNotJson(self): - response = "not a valid json".encode() - self.add_mock_response(response, 200, encode=False) - try: - createDetokenizeResponseBody(self.data, self.mocked_futures, DetokenizeOptions()) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % - response.decode('utf-8')) - - def testRequestBodyNoRedactionKey(self): - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "PLAIN_TEXT" - }] - } - requestBody = getDetokenizeRequestBody(self.tokenField) - self.assertEqual(requestBody, expectedOutput) - - def testRequestBodyWithValidRedaction(self): - expectedOutput = { - "detokenizationParameters": [{ - "token": self.testToken, - "redaction": "REDACTED" - }] - } - data = { - "token": self.testToken, - "redaction": RedactionType.REDACTED - } - requestBody = getDetokenizeRequestBody(data) - self.assertEqual(expectedOutput, 
requestBody) - - def testRequestBodyWithInValidRedaction(self): - data = { - "token": self.testToken, - "redaction": "123" - } - try: - getDetokenizeRequestBody(data) - except SkyflowError as error: - self.assertTrue(error) - self.assertEqual(error.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(error.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % str(type(data["redaction"]))) - - def testGetBulkDetokenizeRequestBody(self): - expectedOutput = { - "detokenizationParameters": [ - { - "token": self.testToken, - "redaction": "REDACTED" - }, - { - "token": self.testToken, - "redaction": "REDACTED" - }, - ] - } - data = { - "token": self.testToken, - "redaction": RedactionType.REDACTED - } - try: - requestBody = getBulkDetokenizeRequestBody([data, data]) - self.assertIn("detokenizationParameters", requestBody) - self.assertEqual(len(requestBody["detokenizationParameters"]), 2) - self.assertEqual(expectedOutput, requestBody) - except SkyflowError as e: - self.fail('Should not have thrown an error') - \ No newline at end of file diff --git a/tests/vault/test_get.py b/tests/vault/test_get.py deleted file mode 100644 index bd98efc1..00000000 --- a/tests/vault/test_get.py +++ /dev/null @@ -1,259 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os - -import warnings -import asyncio -import json -from dotenv import dotenv_values -from skyflow.service_account import generate_bearer_token -from skyflow.vault import Client, Configuration, RedactionType, GetOptions -from skyflow.vault._get import getGetRequestBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages - -class TestGet(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/test_get.py') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, table, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result( - (json.dumps(response).encode(), statusCode, table)) - else: - future.set_result((response, statusCode, table)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetByIdNoRecords(self): - invalidData = {"invalidKey": "invalid"} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetByIdRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, 
SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testGetByIdNoIds(self): - invalidData = {"records": [ - {"invalid": "invalid", "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidIdsType(self): - invalidData = {"records": [ - {"ids": "invalid", "table": "newstripe", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_IDS_TYPE.value % (str)) - - def testGetByIdInvalidIdsType2(self): - invalidData = {"records": [ - {"ids": ["123", 123], "table": "newstripe", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testGetByIdNoTable(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "invalid": "invalid", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetByIdInvalidTableType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": ["invalid"], "redaction": "PLAIN_TEXT"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except 
SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testGetByIdNoColumnName(self): - invalidData = {"records": [ - {"table": "newstripe", "redaction": RedactionType.PLAIN_TEXT}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidColumnName(self): - invalidData = {"records": [ - {"ids": ["123", "456"], "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, - "columnName": ["invalid"]}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_COLUMN_NAME.value % (list)) - - def testGetByIdNoColumnValues(self): - invalidData = {"records": [ - {"table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, "columnName": "card_number"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UNIQUE_COLUMN_OR_IDS_KEY_ERROR.value) - - def testGetByIdInvalidColumnValues(self): - invalidData = {"records": [ - {"ids": ["123", "456"], "table": "newstripe", "redaction": RedactionType.PLAIN_TEXT, - "columnName": "card_number", "columnValues": "invalid"}]} - try: - self.client.get(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_COLUMN_VALUE.value % (str) ) - - def testGetByTokenAndRedaction(self): - 
invalidData = {"records": [ - {"ids": ["123","456"], - "table": "stripe", "redaction": RedactionType.PLAIN_TEXT,}]} - options = GetOptions(True) - try: - self.client.get(invalidData,options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.REDACTION_WITH_TOKENS_NOT_SUPPORTED.value) - - def testGetByNoOptionAndRedaction(self): - invalidData = {"records":[{"ids":["123", "456"], "table":"newstripe"}]} - options = GetOptions(False) - try: - self.client.get(invalidData,options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code,SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message,SkyflowErrorMessages.REDACTION_KEY_ERROR.value) - - def testGetByOptionAndUniqueColumnRedaction(self): - invalidData ={ - "records":[{ - "table":"newstripe", - "columnName":"card_number", - "columnValues":["456","980"], - }] - } - options = GetOptions(True) - try: - self.client.get(invalidData, options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.TOKENS_GET_COLUMN_NOT_SUPPORTED.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKENS_GET_COLUMN_NOT_SUPPORTED.value) - - def testInvalidRedactionTypeWithNoOption(self): - invalidData = { - "records": [{ - "ids": ["123","456"], - "table": "stripe", - "redaction": "invalid_redaction" - }] - } - options = GetOptions(False) - try: - self.client.get(invalidData, options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % (str)) - - def testBothSkyflowIdsAndColumnDetailsPassed(self): - invalidData = { - "records": [ - { - "ids": ["123", "456"], - "table": "stripe", - 
"redaction": RedactionType.PLAIN_TEXT, - "columnName": "email", - "columnValues": ["email1@gmail.com", "email2@gmail.co"] - } - ] - } - options = GetOptions(False) - try: - self.client.get(invalidData, options=options) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.BOTH_IDS_AND_COLUMN_DETAILS_SPECIFIED.value) - - def testGetRequestBodyReturnsRequestBodyWithIds(self): - validData = { - "records": [{ - "ids": ["123", "456"], - "table": "stripe", - }] - } - options = GetOptions(True) - try: - requestBody = getGetRequestBody(validData["records"][0], options) - self.assertTrue(requestBody["tokenization"]) - except SkyflowError as e: - self.fail('Should not have thrown an error') \ No newline at end of file diff --git a/tests/vault/test_get_by_id.py b/tests/vault/test_get_by_id.py deleted file mode 100644 index d9676237..00000000 --- a/tests/vault/test_get_by_id.py +++ /dev/null @@ -1,193 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest -import os - -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault import Client, Configuration, RedactionType -from skyflow.vault._get_by_id import createGetResponseBody -from skyflow.service_account import generate_bearer_token -from dotenv import dotenv_values -import warnings -import asyncio -import json - - -class TestGetById(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.event_loop = asyncio.new_event_loop() - self.mocked_futures = [] - - def tokenProvider(): - token, type = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, table, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result( - (json.dumps(response).encode(), statusCode, table)) - else: - future.set_result((response, statusCode, table)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetByIdNoRecords(self): - invalidData = {"invalidKey": "invalid"} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetByIdRecordsInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, 
SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testGetByIdNoIds(self): - invalidData = {"records": [ - {"invalid": "invalid", "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - def testGetByIdInvalidIdsType(self): - invalidData = {"records": [ - {"ids": "invalid", "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_IDS_TYPE.value % (str)) - - def testGetByIdInvalidIdsType2(self): - invalidData = {"records": [ - {"ids": ["123", 123], "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (int)) - - def testGetByIdNoTable(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "invalid": "invalid", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetByIdInvalidTableType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": ["invalid"], "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except 
SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testGetByIdNoRedaction(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": "pii_fields", "invalid": "invalid"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.REDACTION_KEY_ERROR.value) - - def testGetByIdInvalidRedactionType(self): - invalidData = {"records": [ - {"ids": ["id1", "id2"], "table": "pii_fields", "redaction": "PLAIN_TEXT"}]} - try: - self.client.get_by_id(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_REDACTION_TYPE.value % (str)) - - def testCreateResponseBodySuccess(self): - response = {"records": [ - {"fields": {"card_number": "4111-1111-1111-1111"}}]} - self.add_mock_response(response, 200, "table") - result, partial = createGetResponseBody(self.mocked_futures) - - self.assertFalse(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["fields"], - response["records"][0]["fields"]) - self.assertEqual(result["records"][0]["table"], "table") - - def testCreateResponseBodyPartialSuccess(self): - success_response = {"records": [ - {"fields": {"card_number": "4111-1111-1111-1111"}}]} - self.add_mock_response(success_response, 200, "table") - - failed_response = {"error": { - "http_code": 404, - "message": "Not Found" - }} - self.add_mock_response(failed_response, 404, "ok") - - result, partial = createGetResponseBody(self.mocked_futures) - - self.assertTrue(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["fields"], - 
success_response["records"][0]["fields"]) - self.assertEqual(result["records"][0]["table"], "table") - - self.assertTrue(len(result["errors"]), 1) - self.assertEqual(result["errors"][0]['error']['code'], - failed_response["error"]['http_code']) - self.assertEqual(result["errors"][0]['error']['description'], - failed_response["error"]['message']) - - def testCreateResponseBodyInvalidJson(self): - response = "invalid json" - self.add_mock_response(response.encode(), 200, 'table', encode=False) - - try: - createGetResponseBody(self.mocked_futures) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % response) \ No newline at end of file diff --git a/tests/vault/test_insert.py b/tests/vault/test_insert.py deleted file mode 100644 index c39e8e3c..00000000 --- a/tests/vault/test_insert.py +++ /dev/null @@ -1,649 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import json -import unittest -import os -from requests.models import Response -from dotenv import dotenv_values -from skyflow.vault._insert import getInsertRequestBody, processResponse, convertResponse, getUpsertColumn, validateUpsertOptions -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, InsertOptions, UpsertOption, BYOT - - -class TestInsert(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - record = { - "table": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens":{ - "cardNumber": "4111-1111-1111-1111", - } - } - self.data = {"records": [record]} - self.mockRequest = {"records": [record]} - record2 = { - "table": "pii_fields", - "fields": { - "cardNumber": 
"4111-1111-1111-1111", - "cvv": "234" - } - } - self.data2 = {"records": [record2]} - self.mockRequest2 = {"records": [record2]} - - self.mockResponse = { - "responses": [ - { - "records": [ - { - "skyflow_id": 123, - "tokens": { - "first_name": "4db12c22-758e-4fc9-b41d-e8e48b876776", - "cardNumber": "card_number_token", - "cvv": "cvv_token", - "expiry_date": "6b45daa3-0e81-42a8-a911-23929f1cf9da" - - } - } - ], - } - ], - "requestId": "2g3fd14-z9bs-xnvn4k6-vn1s-e28w35" - } - - self.mockResponseCOESuccessObject = { - "Body": { - "records": self.mockResponse['responses'][0]['records'] - }, - "Status": 200 - } - - self.mockResponseCOEErrorObject = { - "Body": { - "error": "Error Inserting Records due to unique constraint violation" - }, - "Status": 400 - } - - self.mockResponseCOESuccess = { - "responses": [self.mockResponseCOESuccessObject], - "requestId": self.mockResponse['requestId'] - } - - self.mockResponseCOEPartialSuccess = { - "responses": [ - self.mockResponseCOESuccessObject, - self.mockResponseCOEErrorObject - ], - "requestId": self.mockResponse['requestId'] - } - - self.mockResponseCOEFailure = { - "responses": [self.mockResponseCOEErrorObject], - "requestId": self.mockResponse['requestId'] - } - - self.insertOptions = InsertOptions(tokens=True) - self.insertOptions2 = InsertOptions(tokens=True, byot=BYOT.ENABLE) - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetInsertRequestBodyWithValidBody(self): - body = json.loads(getInsertRequestBody(self.data, self.insertOptions2)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens":{ - "cardNumber": "4111-1111-1111-1111", - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidBodyWithoutTokens(self): - body = json.loads(getInsertRequestBody(self.data2, 
self.insertOptions)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidUpsertOptions(self): - body = json.loads(getInsertRequestBody(self.data, InsertOptions(True,[UpsertOption(table='pii_fields',column='column1')], byot=BYOT.ENABLE))) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens": { - "cardNumber": "4111-1111-1111-1111", - }, - "method": 'POST', - "quorum": True, - "tokenization": True, - "upsert": 'column1', - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyWithValidUpsertOptionsWithOutTokens(self): - body = json.loads(getInsertRequestBody(self.data2, InsertOptions(True,[UpsertOption(table='pii_fields',column='column1')]))) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "method": 'POST', - "quorum": True, - "tokenization": True, - "upsert": 'column1', - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyNoRecords(self): - invalidData = {"invalidKey": self.data["records"]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testGetInsertRequestBodyRecordsInvalidType(self): - invalidData = {"records": 'records'} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, 
SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str(type('str')))) - - def testGetInsertRequestBodyNoFields(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - } - }, - { - "table": "table", - "invalid": {} - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FIELDS_KEY_ERROR.value) - - def testGetInsertRequestBodyInvalidFieldsType(self): - invalidData = {"records": [{ - "table": "table", - "fields": 'fields' - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % (str(type('str')))) - - def testInvalidTokensInRecord(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": "tokens" - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TOKENS_TYPE.value % (str(type('str')))) - - def testEmptyTokensInRecord(self): - invalidData = {"records": [{ - "table": "table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": { - } - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_TOKENS_IN_INSERT.value) - - def testMismatchTokensInRecord(self): - invalidData = {"records": [{ - "table": 
"table", - "fields": { - "card_number": "4111-1111" - }, - "tokens": { - "cvv": "123" - } - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions2) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS.value) - - # def testTokensInRecord(self): - # invalidData = {"records": [{ - # "table": "table", - # "fields": { - # "card_number": "4111-1111" - # }, - # "tokens": { - # "cvv": "123" - # } - # } - # ]} - # try: - # getInsertRequestBody(invalidData, self.insertOptions) - # self.fail('Should have thrown an error') - # except SkyflowError as e: - # self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - # self.assertEqual( - # e.message, SkyflowErrorMessages.MISMATCH_OF_FIELDS_AND_TOKENS.value) - - def testGetInsertRequestBodyWithTokensValidBody(self): - body = json.loads(getInsertRequestBody(self.data, self.insertOptions2)) - expectedOutput = { - "tableName": "pii_fields", - "fields": { - "cardNumber": "4111-1111-1111-1111", - "cvv": "234" - }, - "tokens": { - "cardNumber": "4111-1111-1111-1111", - - }, - "method": 'POST', - "quorum": True, - "tokenization": True - } - self.assertEqual(body["records"][0], expectedOutput) - - def testGetInsertRequestBodyNoTable(self): - invalidData = {"records": [{ - "noTable": "tableshouldbehere", - "fields": { - "card_number": "4111-1111" - } - }, - { - "table": "table", - "invalid": {} - } - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testGetInsertRequestBodyInvalidTableType(self): - invalidData = {"records": [{ - "table": {'invalidtype': 'thisisinvalid'}, - "fields": { - "card_number": "4111-1111" - } - 
} - ]} - try: - getInsertRequestBody(invalidData, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % ( - str(type({'a': 'b'})))) - - def testGetInsertRequestBodyWithContinueOnErrorAsTrue(self): - try: - options = InsertOptions(tokens=True, continueOnError=True, byot=BYOT.ENABLE) - request = getInsertRequestBody(self.data, options) - self.assertIn('continueOnError', request) - request = json.loads(request) - self.assertEqual(request['continueOnError'], True) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testGetInsertRequestBodyWithContinueOnErrorAsFalse(self): - try: - options = InsertOptions(tokens=True, continueOnError=False, byot=BYOT.ENABLE) - request = getInsertRequestBody(self.data, options) - # assert 'continueOnError' in request - self.assertIn('continueOnError', request) - request = json.loads(request) - self.assertEqual(request['continueOnError'], False) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testGetInsertRequestBodyWithoutContinueOnError(self): - try: - request = getInsertRequestBody(self.data, self.insertOptions2) - # assert 'continueOnError' not in request - self.assertNotIn('continueOnError', request) - except SkyflowError as e: - self.fail('Should not have thrown an error') - - def testInsertInvalidJson(self): - invalidjson = { - "records": [{ - "table": "table", - "fields": { - "invalid": json - } - }] - } - - try: - getInsertRequestBody(invalidjson, self.insertOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_JSON.value % ('insert payload')) - - def testProcessInvalidResponse(self): - response = Response() - response.status_code = 500 
- response._content = b"Invalid Request" - try: - processResponse(response) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % - response.content.decode('utf-8')) - - def testProcessValidResponse(self): - response = Response() - response.status_code = 200 - response._content = b'{"key": "value"}' - try: - responseDict = processResponse(response) - self.assertDictEqual(responseDict, {'key': 'value'}) - except SkyflowError as e: - self.fail() - - def testClientInit(self): - config = Configuration( - 'vaultid', 'https://skyflow.com', lambda: 'test') - client = Client(config) - self.assertEqual(client.vaultURL, 'https://skyflow.com') - self.assertEqual(client.vaultID, 'vaultid') - self.assertEqual(client.tokenProvider(), 'test') - - def testProcessResponseInvalidJson(self): - invalid_response = Response() - invalid_response.status_code = 200 - invalid_response._content = b'invalid-json' - try: - processResponse(invalid_response) - self.fail('not failing on invalid json') - except SkyflowError as se: - self.assertEqual(se.code, 200) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'invalid-json') - - def testProcessResponseFail(self): - invalid_response = Response() - invalid_response.status_code = 404 - invalid_response._content = b"error" - try: - processResponse(invalid_response) - self.fail('Not failing on invalid error json') - except SkyflowError as se: - self.assertEqual(se.code, 404) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'error') - - def testConvertResponseNoTokens(self): - options = InsertOptions(tokens=False) - result, partial = convertResponse(self.mockRequest, self.mockResponse, options) - self.assertFalse(partial) - self.assertEqual(len(result["records"]), 1) - self.assertEqual(result["records"][0]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["table"], "pii_fields") - 
self.assertEqual(result["records"][0]["request_index"], 0) - self.assertNotIn("tokens", result["records"][0]) - - def testConvertResponseWithTokens(self): - options = InsertOptions(tokens=True) - result, partial = convertResponse(self.mockRequest, self.mockResponse, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["request_index"], 0) - - self.assertEqual(result["records"][0]["fields"] - ["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"] - ["cvv"], "cvv_token") - - def testConvertResponseWithContinueoOnErrorSuccess(self): - options = InsertOptions(tokens=True, continueOnError=True) - result, partial = convertResponse(self.mockRequest, self.mockResponseCOESuccess, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("errors", result) - - self.assertNotIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["fields"]["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"]["cvv"], "cvv_token") - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - def testConvertResponseWithContinueoOnErrorAndNoTokensSuccess(self): - options = InsertOptions(tokens=False, continueOnError=True) - result, partial = convertResponse(self.mockRequest, self.mockResponseCOESuccess, options) - self.assertFalse(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertNotIn("errors", 
result) - - self.assertIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["skyflow_id"], 123) - - self.assertIn("table", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertNotIn("fields", result["records"][0]) - self.assertNotIn("tokens", result["records"][0]) - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - def testConvertResponseWithContinueoOnErrorPartialSuccess(self): - options = InsertOptions(tokens=True, continueOnError=True) - partialSuccessRequest = { - "records": [ - self.mockRequest['records'][0], - self.mockRequest['records'][0], - ] - } - result, partial = convertResponse(partialSuccessRequest, self.mockResponseCOEPartialSuccess, options) - self.assertTrue(partial) - - self.assertEqual(len(result["records"]), 1) - self.assertEqual(len(result["errors"]), 1) - - self.assertNotIn("skyflow_id", result["records"][0]) - self.assertEqual(result["records"][0]["table"], "pii_fields") - - self.assertIn("fields", result["records"][0]) - self.assertEqual(result["records"][0]["fields"]["skyflow_id"], 123) - self.assertEqual(result["records"][0]["fields"]["cardNumber"], "card_number_token") - self.assertEqual(result["records"][0]["fields"]["cvv"], "cvv_token") - - self.assertIn("request_index", result["records"][0]) - self.assertEqual(result["records"][0]["request_index"], 0) - - message = self.mockResponseCOEErrorObject['Body']['error'] - message += ' - request id: ' + self.mockResponse['requestId'] - self.assertEqual(result["errors"][0]["error"]["code"], 400) - self.assertEqual(result["errors"][0]["error"]["description"], message) - - self.assertIn("request_index", result["errors"][0]["error"]) - self.assertEqual(result["errors"][0]["error"]["request_index"], 1) - - def testConvertResponseWithContinueoOnErrorFailure(self): - options = InsertOptions(tokens=True, continueOnError=True) - result, partial = 
convertResponse(self.mockRequest, self.mockResponseCOEFailure, options) - self.assertFalse(partial) - - self.assertEqual(len(result["errors"]), 1) - self.assertNotIn("records", result) - - message = self.mockResponseCOEErrorObject['Body']['error'] - message += ' - request id: ' + self.mockResponse['requestId'] - self.assertEqual(result["errors"][0]["error"]["code"], 400) - self.assertEqual(result["errors"][0]["error"]["description"], message) - self.assertIn("request_index", result["errors"][0]["error"]) - self.assertEqual(result["errors"][0]["error"]["request_index"], 0) - - def testInsertInvalidToken(self): - config = Configuration('id', 'url', lambda: 'invalid-token') - try: - Client(config).insert({'records': []}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) - - def testGetUpsertColumn(self): - testUpsertOptions = [UpsertOption(table='table1',column='column1'), - UpsertOption(table='table2',column='column2')] - upsertValid = getUpsertColumn('table1',upsertOptions=testUpsertOptions) - upsertInvalid = getUpsertColumn('table3',upsertOptions=testUpsertOptions) - self.assertEqual(upsertValid,'column1') - self.assertEqual(upsertInvalid,'') - - def testValidUpsertOptions(self): - testUpsertOptions = 'upsert_string' - try: - validateUpsertOptions(testUpsertOptions) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_OPTIONS_TYPE.value % type(testUpsertOptions) ) - try: - validateUpsertOptions(upsertOptions=[]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTIONS_LIST.value) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table=123,column='')]) - except SkyflowError as e: - 
self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_TABLE_TYPE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='',column='')]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_TABLE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='table1',column=1343)]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_UPSERT_COLUMN_TYPE.value % 0) - try: - validateUpsertOptions(upsertOptions=[UpsertOption(table='table2',column='')]) - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_UPSERT_OPTION_COLUMN.value % 0) - - def testTokensPassedWithByotModeDisable(self): - try: - options = InsertOptions(byot=BYOT.DISABLE) - getInsertRequestBody(self.data, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.TOKENS_PASSED_FOR_BYOT_DISABLE.value) - - def testTokensNotPassedWithByotModeEnable(self): - try: - getInsertRequestBody(self.data2, self.insertOptions2) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % "ENABLE") - - def testTokensNotPassedWithByotModeEnableStrict(self): - try: - options = InsertOptions(byot=BYOT.ENABLE_STRICT) - getInsertRequestBody(self.data2, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.NO_TOKENS_IN_INSERT.value % "ENABLE_STRICT") - - def testTokensPassedWithByotModeEnableStrict(self): - try: - options = InsertOptions(byot=BYOT.ENABLE_STRICT) - 
getInsertRequestBody(self.data, options) - self.fail("Should have thrown an error") - except SkyflowError as e: - self.assertEqual(e.message, SkyflowErrorMessages.INSUFFICIENT_TOKENS_PASSED_FOR_BYOT_ENABLE_STRICT.value) diff --git a/tests/vault/test_invoke_connection.py b/tests/vault/test_invoke_connection.py deleted file mode 100644 index 15d562cd..00000000 --- a/tests/vault/test_invoke_connection.py +++ /dev/null @@ -1,148 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import unittest - -from requests import request -from skyflow.service_account._token import generate_bearer_token -from skyflow.vault._connection import * -from skyflow.vault._client import * -from skyflow.vault._config import * -from skyflow.errors._skyflow_errors import * -from dotenv import dotenv_values - - -class testInvokeConnection(unittest.TestCase): - def testCreateRequestDefault(self): - config = ConnectionConfig('https://skyflow.com/', RequestMethod.GET) - try: - req = createRequest(config) - body, url, method = req.body, req.url, req.method - self.assertEqual(url, 'https://skyflow.com/') - self.assertEqual(body, '{}') - self.assertEqual(method, RequestMethod.GET.value) - except SkyflowError: - self.fail() - - def testCreateRequestInvalidJSONBody(self): - invalidJsonBody = {'somekey': unittest} - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestBody=invalidJsonBody) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_REQUEST_BODY.value) - - def testCreateRequestInvalidBodyType(self): - nonDictBody = 'body' - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestBody=nonDictBody) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, 
SkyflowErrorMessages.INVALID_REQUEST_BODY.value) - - def testCreateRequestBodyInvalidHeadersJson(self): - invalidJsonHeaders = {'somekey': unittest} - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestHeader=invalidJsonHeaders) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_HEADERS.value) - - def testCreateRequestBodyHeadersNotDict(self): - invalidJsonHeaders = 'invalidheaderstype' - config = ConnectionConfig( - 'https://skyflow.com/', RequestMethod.GET, requestHeader=invalidJsonHeaders) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_HEADERS.value) - - def testCreateRequestInvalidURL(self): - invalidUrl = 'https::///skyflow.com' - config = ConnectionConfig(invalidUrl, RequestMethod.GET) - try: - createRequest(config) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_URL.value % (invalidUrl)) - - def testPathParams(self): - try: - url = parsePathParams(url='https://skyflow.com/{name}/{department}/content/{action}', - pathParams={'name': 'john', 'department': 'test', 'action': 'download'}) - - expectedURL = 'https://skyflow.com/john/test/content/download' - - self.assertEqual(url, expectedURL) - except SkyflowError as e: - self.fail() - - def testVerifyParamsPathParamsNotDict(self): - pathParams = {'name': 'john', 'department': ['test'], 'action': 1} - try: - verifyParams({}, pathParams) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_PATH_PARAM_TYPE.value % ( - str(type('department')), 
str(type(['str'])))) - - def testVerifyParamsQueryParamsNotDict(self): - queryParams = {'name': 'john', 2: [json], 'action': 1} - try: - verifyParams(queryParams, {}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual(e.message, SkyflowErrorMessages.INVALID_QUERY_PARAM_TYPE.value % ( - str(type(2)), str(type(['str'])))) - - def testVerifyParamsInvalidPathParams(self): - pathParams = 'string' - try: - verifyParams({}, pathParams) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_PATH_PARAMS.value) - - def testVerifyParamsInvalidQueryParams(self): - queryParams = 'string' - try: - verifyParams(queryParams, {}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_QUERY_PARAMS.value) - - def testInvokeConnectionFailure(self): - config = Configuration('', '', lambda: 'token') - client = Client(config) - connectionConfig = ConnectionConfig( - 'url', RequestMethod.POST, requestBody=[]) - try: - client.invoke_connection(connectionConfig) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) diff --git a/tests/vault/test_query.py b/tests/vault/test_query.py deleted file mode 100644 index 63f90794..00000000 --- a/tests/vault/test_query.py +++ /dev/null @@ -1,175 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json -import unittest -import os -from unittest import mock -import requests -from requests.models import Response -from skyflow.vault._query import getQueryRequestBody, getQueryResponse -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.vault._client import Client -from skyflow.vault._config import Configuration, QueryOptions - -class TestQuery(unittest.TestCase): - - def setUp(self) -> None: - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - query = "SELECT * FROM pii_fields WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'" - self.data = {"query": query} - self.mockRequest = {"records": [query]} - - self.mockResponse = { - "records": [ - { - "fields": { - "card_number": "XXXXXXXXXXXX1111", - "card_pin": "*REDACTED*", - "cvv": "", - "expiration_date": "*REDACTED*", - "expiration_month": "*REDACTED*", - "expiration_year": "*REDACTED*", - "name": "a***te", - "skyflow_id": "3ea3861-x107-40w8-la98-106sp08ea83f", - "ssn": "XXX-XX-6789", - "zip_code": None - }, - "tokens": None - } - ] - } - - self.requestId = '5d5d7e21-c789-9fcc-ba31-2a279d3a28ef' - - self.mockApiError = { - "error": { - "grpc_code": 13, - "http_code": 500, - "message": "ERROR (internal_error): Could not find Notebook Mapping Notebook Name was not found", - "http_status": "Internal Server Error", - "details": [] - } - } - - self.mockFailResponse = { - "error": { - "code": 500, - "description": "ERROR (internal_error): Could not find Notebook Mapping Notebook Name was not found - request id: 5d5d7e21-c789-9fcc-ba31-2a279d3a28ef" - } - } - - self.queryOptions = QueryOptions() - - return super().setUp() - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testGetQueryRequestBodyWithValidBody(self): - body = json.loads(getQueryRequestBody(self.data, self.queryOptions)) - expectedOutput = { - "query": "SELECT * FROM pii_fields WHERE skyflow_id='3ea3861-x107-40w8-la98-106sp08ea83f'", - 
} - self.assertEqual(body, expectedOutput) - - def testGetQueryRequestBodyNoQuery(self): - invalidData = {"invalidKey": self.data["query"]} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.QUERY_KEY_ERROR.value) - - def testGetQueryRequestBodyInvalidType(self): - invalidData = {"query": ['SELECT * FROM table_name']} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_QUERY_TYPE.value % (str(type(invalidData["query"])))) - - def testGetQueryRequestBodyEmptyBody(self): - invalidData = {"query": ''} - try: - getQueryRequestBody(invalidData, self.queryOptions) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.EMPTY_QUERY.value) - - def testGetQueryValidResponse(self): - response = Response() - response.status_code = 200 - response._content = b'{"key": "value"}' - try: - responseDict = getQueryResponse(response) - self.assertDictEqual(responseDict, {'key': 'value'}) - except SkyflowError as e: - self.fail() - - def testClientInit(self): - config = Configuration( - 'vaultid', 'https://skyflow.com', lambda: 'test') - client = Client(config) - self.assertEqual(client.vaultURL, 'https://skyflow.com') - self.assertEqual(client.vaultID, 'vaultid') - self.assertEqual(client.tokenProvider(), 'test') - - def testGetQueryResponseSuccessInvalidJson(self): - invalid_response = Response() - invalid_response.status_code = 200 - invalid_response._content = b'invalid-json' - try: - getQueryResponse(invalid_response) - self.fail('not failing on 
invalid json') - except SkyflowError as se: - self.assertEqual(se.code, 200) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'invalid-json') - - def testGetQueryResponseFailInvalidJson(self): - invalid_response = mock.Mock( - spec=requests.Response, - status_code=404, - content=b'error' - ) - invalid_response.raise_for_status.side_effect = requests.exceptions.HTTPError("Not found") - try: - getQueryResponse(invalid_response) - self.fail('Not failing on invalid error json') - except SkyflowError as se: - self.assertEqual(se.code, 404) - self.assertEqual( - se.message, SkyflowErrorMessages.RESPONSE_NOT_JSON.value % 'error') - - def testGetQueryResponseFail(self): - response = mock.Mock( - spec=requests.Response, - status_code=500, - content=json.dumps(self.mockApiError).encode('utf-8') - ) - response.headers = {"x-request-id": self.requestId} - response.raise_for_status.side_effect = requests.exceptions.HTTPError("Server Error") - try: - getQueryResponse(response) - self.fail('not throwing exception when error code is 500') - except SkyflowError as e: - self.assertEqual(e.code, 500) - self.assertEqual(e.message, SkyflowErrorMessages.SERVER_ERROR.value) - self.assertDictEqual(e.data, self.mockFailResponse) - - def testQueryInvalidToken(self): - config = Configuration('id', 'url', lambda: 'invalid-token') - try: - Client(config).query({'query': 'SELECT * FROM table_name'}) - self.fail() - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TOKEN_PROVIDER_INVALID_TOKEN.value) diff --git a/tests/vault/test_token_provider_wrapper.py b/tests/vault/test_token_provider_wrapper.py deleted file mode 100644 index 7d78a67a..00000000 --- a/tests/vault/test_token_provider_wrapper.py +++ /dev/null @@ -1,62 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import unittest - -import dotenv -from skyflow.vault._token import tokenProviderWrapper -from skyflow.service_account import generate_bearer_token -from skyflow.errors._skyflow_errors import * - - -class TestTokenProviderWrapper(unittest.TestCase): - - def setUp(self) -> None: - return super().setUp() - - def testInvalidStoredToken(self): - env_values = dotenv.dotenv_values('.env') - - def tokenProvider(): - newerToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return newerToken - - try: - tokenProviderWrapper('invalid', tokenProvider, "Test") - self.fail('Should have thrown invalid jwt error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.JWT_DECODE_ERROR.value) - - def testNoStoredToken(self): - env_values = dotenv.dotenv_values('.env') - self.newToken = '' - - def tokenProvider(): - self.newToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return self.newToken - - try: - newerToken = tokenProviderWrapper('', tokenProvider, "Test") - self.assertEqual(newerToken, self.newToken) - except SkyflowError as e: - self.fail('Should have decoded token') - - def testStoredTokenNotExpired(self): - env_values = dotenv.dotenv_values('.env') - self.newerToken = '' - - def tokenProvider(): - self.newerToken, _ = generate_bearer_token( - env_values['CREDENTIALS_FILE_PATH']) - return self.newerToken - - try: - newToken = tokenProviderWrapper( - tokenProvider(), tokenProvider, "Test") - self.assertEqual(newToken, self.newerToken) - except SkyflowError as e: - self.fail('Should have decoded token') diff --git a/tests/vault/test_update.py b/tests/vault/test_update.py deleted file mode 100644 index c6a00ef2..00000000 --- a/tests/vault/test_update.py +++ /dev/null @@ -1,184 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. 
-''' -import json -import unittest -import os -import asyncio -import warnings - -from dotenv import dotenv_values -from skyflow.vault._client import Client, Configuration -from skyflow.vault._update import sendUpdateRequests, createUpdateResponseBody -from skyflow.errors._skyflow_errors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages -from skyflow.service_account import generate_bearer_token -from skyflow.vault._client import Client -from skyflow.vault._config import UpdateOptions - - -class TestUpdate(unittest.TestCase): - - def setUp(self) -> None: - self.envValues = dotenv_values(".env") - self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/') - self.mocked_futures = [] - self.event_loop = asyncio.new_event_loop() - - def tokenProvider(): - token, _ = generate_bearer_token( - self.envValues["CREDENTIALS_FILE_PATH"]) - return token - - config = Configuration( - self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider) - self.client = Client(config) - warnings.filterwarnings( - action="ignore", message="unclosed", category=ResourceWarning) - return super().setUp() - - def add_mock_response(self, response, statusCode, encode=True): - future = asyncio.Future(loop=self.event_loop) - if encode: - future.set_result((json.dumps(response).encode(), statusCode)) - else: - future.set_result((response, statusCode)) - future.done() - self.mocked_futures.append(future) - - def getDataPath(self, file): - return self.dataPath + file + '.json' - - def testUpdateNoRecords(self): - invalidData = {} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value) - - def testUpdateInvalidType(self): - invalidData = {"records": "invalid"} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - 
self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str)) - - def testUpdateNoIds(self): - invalidData = {"records": [ - {"table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.IDS_KEY_ERROR.value) - - def testUpdateInvalidIdType(self): - invalidData = {"records": [ - {"id": ["123"], "table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_ID_TYPE.value % (list)) - - def testUpdateNoTable(self): - invalidData = {"records": [ - {"id": "id"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.TABLE_KEY_ERROR.value) - - def testUpdateInvalidTableType(self): - invalidData = {"records": [ - {"id": "id1", "table": ["invalid"]}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_TABLE_TYPE.value % (list)) - - def testUpdateNoFields(self): - invalidData = {"records": [ - {"id": "id", "table": "pii_fields"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.FIELDS_KEY_ERROR.value) - - def testUpdateInvalidFieldsType(self): - 
invalidData = {"records": [ - {"id": "id1", "table": "pii_fields", "fields": "invalid"}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.INVALID_FIELDS_TYPE.value % (str)) - - def testUpdateInvalidFieldsType2(self): - invalidData = {"records": [ - {"id": "id1", "table": "pii_fields", "fields": {}}]} - try: - self.client.update(invalidData) - self.fail('Should have thrown an error') - except SkyflowError as e: - self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value) - self.assertEqual( - e.message, SkyflowErrorMessages.UPDATE_FIELD_KEY_ERROR.value) - - def testResponseBodySuccess(self): - response = {"skyflow_id": "123", "tokens": {"first_name": "John"}} - mock_response = [{"id": "123", "fields": {"first_name": "John"}}] - self.add_mock_response(response, 200) - print("Seld.mockedFuturs", self.mocked_futures) - res, partial = createUpdateResponseBody(self.mocked_futures) - self.assertEqual(partial, False) - self.assertEqual(res, {"records": mock_response, "errors": []}) - - def testResponseBodyPartialSuccess(self): - success_response = {"skyflow_id": "123", "tokens": {"first_name": "John"}} - mock_success_response = [{"id": "123", "fields": {"first_name": "John"}}] - error_response = {"error": {"http_code": 404, "message": "not found"}} - self.add_mock_response(success_response, 200) - self.add_mock_response(error_response, 404) - res, partial = createUpdateResponseBody(self.mocked_futures) - self.assertTrue(partial) - self.assertEqual(res["records"], mock_success_response) - errors = res["errors"] - - self.assertIsNotNone(errors) - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]["error"]["code"], - error_response["error"]["http_code"]) - self.assertEqual( - errors[0]["error"]["description"], error_response["error"]["message"]) - - def testResponseNotJson(self): 
- response = "not a valid json".encode() - self.add_mock_response(response, 200, encode=False) - try: - createUpdateResponseBody(self.mocked_futures) - except SkyflowError as error: - expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON - self.assertEqual(error.code, 200) - self.assertEqual(error.message, expectedError.value % - response.decode('utf-8')) diff --git a/tests/vault/test_url_encoder.py b/tests/vault/test_url_encoder.py deleted file mode 100644 index 1e4c8443..00000000 --- a/tests/vault/test_url_encoder.py +++ /dev/null @@ -1,115 +0,0 @@ -''' - Copyright (c) 2022 Skyflow, Inc. -''' -import platform -import sys -import unittest -from unittest import mock -from skyflow._utils import http_build_query, getMetrics -from skyflow.version import SDK_VERSION - -class TestUrlEncoder(unittest.TestCase): - def setUp(self) -> None: - return super().setUp() - - def test_encoder_simple(self): - data = { - "key": "value" - } - - http_data = http_build_query(data) - self.assertEqual(http_data, "key=value") - - def test_encoder_multiplekeys(self): - data = { - "key": "value", - "key2": "value2" - } - - http_data = http_build_query(data) - self.assertEqual(http_data, "key=value&key2=value2") - - def test_encoder_nested(self): - data = { - "key": "value", - "nested": { - "key": "value" - } - } - - http_data = http_build_query(data) - - self.assertEqual(http_data, "key=value&nested%5Bkey%5D=value") - - def test_encoder_array(self): - data = { - "key": "value", - "nested": { - "array": ["one", "two"], - "key": "value" - } - } - http_data = http_build_query(data) - - self.assertEqual( - http_data, "key=value&nested%5Barray%5D%5B0%5D=one&nested%5Barray%5D%5B1%5D=two&nested%5Bkey%5D=value") - - # Test Case 1: Success case - def test_get_metrics(self): - expected = { - 'sdk_name_version': "skyflow-python@" + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version, - } - actual = 
getMetrics() - self.assertEqual(actual, expected) - - @mock.patch('platform.node', return_value='') - def test_getMetrics_no_device_model(self, mock_node): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': '', - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - - actual_output = getMetrics() - expected_output['sdk_client_device_model'] = '' - self.assertEqual(actual_output, expected_output) - - @mock.patch('platform.node', return_value='Mocked Device Model') - def test_getMetrics_with_device_model(self, mock_node): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': 'Mocked Device Model', - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output) - - @mock.patch('sys.platform', return_value='mocked_os') - def test_getMetrics_with_os_details(self, mock_platform): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': "Python " + sys.version - } - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output) - - def test_getMetrics_with_runtime_details(self): - expected_output = { - 'sdk_name_version': 'skyflow-python@' + SDK_VERSION, - 'sdk_client_device_model': platform.node(), - 'sdk_client_os_details': sys.platform, - 'sdk_runtime_details': 'Python ' + 'mocked_version' - } - - with mock.patch('sys.version', 'mocked_version'), \ - mock.patch('sys.version_info', new=(3, 11, 2)): - actual_output = getMetrics() - self.assertEqual(actual_output, expected_output)