diff --git a/.flake8 b/.flake8
index e758ffec45..9311ae8d02 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,7 @@
[flake8]
max-line-length = 88
exclude = cookiecutter
+ignore = E, W
per-file-ignores =
# Don't require docstrings conventions in private modules
singer_sdk/helpers/_*.py:DAR
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index 7d3b1a8974..f6cf3f8a70 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -15,7 +15,7 @@ body:
attributes:
label: Singer SDK Version
description: Version of the library you are using
- placeholder: "0.30.0"
+ placeholder: "0.33.1"
validations:
required: true
- type: checkboxes
@@ -36,6 +36,7 @@ body:
- "3.8"
- "3.9"
- "3.10"
+ - "3.11"
- "NA"
validations:
required: true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 4d01dd881f..ae0a1fdb6d 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,7 +5,6 @@ updates:
schedule:
interval: daily
time: "12:00"
- timezone: "UTC"
reviewers: [meltano/engineering]
labels: [deps]
- package-ecosystem: pip
@@ -13,7 +12,6 @@ updates:
schedule:
interval: daily
time: "12:00"
- timezone: "UTC"
reviewers: [meltano/engineering]
labels: [deps]
- package-ecosystem: github-actions
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 75b4dc276a..5b3bc04f5a 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -9,7 +9,13 @@ name: "CodeQL"
on:
push:
branches: [ "main" ]
- pull_request: {}
+ paths:
+ - .github/workflows/codeql-analysis.yml
+ - '**.py' # Any Python file
+ pull_request:
+ paths:
+ - .github/workflows/codeql-analysis.yml
+ - '**.py' # Any Python file
schedule:
- cron: '37 10 * * 5'
@@ -31,7 +37,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml
new file mode 100644
index 0000000000..e41b0b51bb
--- /dev/null
+++ b/.github/workflows/codspeed.yml
@@ -0,0 +1,44 @@
+name: codspeed
+
+on:
+ push:
+ branches:
+ - "main"
+ pull_request:
+ # `workflow_dispatch` allows CodSpeed to trigger backtest
+ # performance analysis in order to generate initial data.
+ workflow_dispatch:
+
+jobs:
+ benchmarks:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out the repository
+ uses: actions/checkout@v4.1.1
+
+ - name: Setup Python
+ uses: actions/setup-python@v4.7.1
+ with:
+ python-version: 3.11
+ architecture: x64
+
+ - name: Install poetry
+ run: |
+ curl -fsS https://install.python-poetry.org | python - -y
+
+ - name: Configure poetry
+ run: poetry config virtualenvs.create false
+
+ - name: Install project
+ run: >
+ poetry install
+ -vvv
+ --with dev
+ --with benchmark
+ --all-extras
+
+ - name: Run benchmarks
+ uses: CodSpeedHQ/action@v1
+ with:
+ token: ${{ secrets.CODSPEED_TOKEN }}
+ run: pytest tests/ --codspeed
diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt
index 246d39c69c..0e226fe7b2 100644
--- a/.github/workflows/constraints.txt
+++ b/.github/workflows/constraints.txt
@@ -1,5 +1,6 @@
-pip==23.2
-poetry==1.5.1
-pre-commit==3.3.3
+pip==23.3.1
+poetry==1.7.1
+poetry-plugin-export==1.6.0
+pre-commit==3.5.0
nox==2023.4.22
nox-poetry==1.0.3
diff --git a/.github/workflows/cookiecutter-e2e.yml b/.github/workflows/cookiecutter-e2e.yml
index a88fd6bddf..0df1fa2d83 100644
--- a/.github/workflows/cookiecutter-e2e.yml
+++ b/.github/workflows/cookiecutter-e2e.yml
@@ -3,10 +3,16 @@ name: E2E Cookiecutters
on:
pull_request:
types: [opened, synchronize, reopened]
- paths: ["cookiecutter/**", "e2e-tests/cookiecutters/**"]
+ paths:
+ - "cookiecutter/**"
+ - "e2e-tests/cookiecutters/**"
+ - ".github/workflows/cookiecutter-e2e.yml"
push:
branches: [main]
- paths: ["cookiecutter/**", "e2e-tests/cookiecutters/**"]
+ paths:
+ - "cookiecutter/**"
+ - "e2e-tests/cookiecutters/**"
+ - ".github/workflows/cookiecutter-e2e.yml"
workflow_dispatch:
concurrency:
@@ -18,17 +24,17 @@ env:
jobs:
lint:
- name: Cookiecutter E2E ${{ matrix.python-version }} ${{ matrix.python-version }} / ${{ matrix.os }}
+ name: Cookiecutter E2E Python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
include:
- - { python-version: "3.10", os: "ubuntu-latest" }
+ - { python-version: "3.11", os: "ubuntu-latest" }
steps:
- name: Check out the repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: Upgrade pip
env:
@@ -38,12 +44,16 @@ jobs:
pip --version
- name: Install Poetry
+ env:
+ PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- name: Setup Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v4.7.1
with:
python-version: ${{ matrix.python-version }}
architecture: x64
@@ -59,8 +69,8 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
@@ -74,7 +84,10 @@ jobs:
path: |
/tmp/tap-*
/tmp/target-*
+ /tmp/mapper-*
!/tmp/tap-*/.mypy_cache/
!/tmp/target-*/.mypy_cache/
+ !/tmp/mapper-*/.mypy_cache/
!/tmp/tap-*/.tox/
!/tmp/target-*/.tox/
+ !/tmp/mapper-*/.tox/
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index 8048f9cad2..b236f031aa 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -5,9 +5,6 @@ on:
workflow_dispatch:
inputs: {}
-env:
- FOSSA_CLI_INSTALLER_VERSION: '3.3.10'
-
permissions:
contents: read
@@ -16,26 +13,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: GitHub dependency vulnerability check
if: ${{ github.event_name == 'pull_request_target' }}
- uses: actions/dependency-review-action@v3.0.6
+ uses: actions/dependency-review-action@v3.1.3
with:
fail-on-severity: high
-
- - name: FOSSA dependency license check
- run: |
- # `$FOSSA_CLI_INSTALLER_VERSION` only controls the version of the installer used - the latest version of `fossa-cli` will always be used.
- curl --no-progress-meter -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/v${FOSSA_CLI_INSTALLER_VERSION}/install-latest.sh | bash
-
- echo '## FOSSA dependency license check' >> $GITHUB_STEP_SUMMARY
- echo '' >> $GITHUB_STEP_SUMMARY
-
- fossa analyze --fossa-api-key ${{ secrets.MELTYBOT_FOSSA_API_KEY }} --revision ${{ github.sha }} |& tee fossa_analyze.log
- fossa test --fossa-api-key ${{ secrets.MELTYBOT_FOSSA_API_KEY }} --revision ${{ github.sha }}
-
- TEST_FAILED=$?
- FOSSA_REPORT_LINK="$(grep -A 1 '[ INFO] View FOSSA Report:' fossa_analyze.log | tail -n 1 | sed -e 's/^\[ INFO\]\s*//')"
- echo "[FOSSA detected $([ $TEST_FAILED -ne 0 ] && echo -n '' || echo 'no ')issues](${FOSSA_REPORT_LINK})" >> $GITHUB_STEP_SUMMARY
- exit $TEST_FAILED
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 73eea59534..e1f42fd100 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,8 +1,9 @@
name: Release
on:
- release:
- types: [published]
+ push:
+ tags:
+ - v*
permissions:
contents: write # Needed to upload artifacts to the release
@@ -16,12 +17,12 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: Set up Python
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v4.7.1
with:
- python-version: "3.10"
+ python-version: "3.11"
- name: Upgrade pip
env:
@@ -56,4 +57,4 @@ jobs:
file_glob: true
- name: Publish
- uses: pypa/gh-action-pypi-publish@v1.8.8
+ uses: pypa/gh-action-pypi-publish@v1.8.10
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 495b4e58f5..84306c9104 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -37,7 +37,7 @@ env:
jobs:
tests:
- name: Test on ${{ matrix.python-version }} (${{ matrix.session }}) / ${{ matrix.os }}
+ name: "Test on ${{ matrix.python-version }} (${{ matrix.session }}) / ${{ matrix.os }} / SQLAlchemy: ${{ matrix.sqlalchemy }}"
runs-on: ${{ matrix.os }}
env:
NOXSESSION: ${{ matrix.session }}
@@ -47,23 +47,27 @@ jobs:
session: [tests]
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ sqlalchemy: ["2.*"]
include:
- - { session: doctest, python-version: "3.10", os: "ubuntu-latest" }
- - { session: mypy, python-version: "3.8", os: "ubuntu-latest" }
+ - { session: tests, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "1.*" }
+ - { session: doctest, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "2.*" }
+ - { session: mypy, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "2.*" }
steps:
- name: Check out the repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: Install Poetry
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- name: Setup Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v4.7.1
with:
python-version: ${{ matrix.python-version }}
architecture: x64
@@ -81,11 +85,13 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
+ env:
+ SQLALCHEMY_VERSION: ${{ matrix.sqlalchemy }}
run: |
nox --python=${{ matrix.python-version }}
@@ -101,6 +107,8 @@ jobs:
runs-on: ubuntu-latest
if: ${{ !github.event.pull_request.head.repo.fork }}
env:
+ NOXPYTHON: "3.11"
+ NOXSESSION: tests
SAMPLE_TAP_GITLAB_AUTH_TOKEN: ${{ secrets.SAMPLE_TAP_GITLAB_AUTH_TOKEN }}
SAMPLE_TAP_GITLAB_GROUP_IDS: ${{ secrets.SAMPLE_TAP_GITLAB_GROUP_IDS }}
SAMPLE_TAP_GITLAB_PROJECT_IDS: ${{ secrets.SAMPLE_TAP_GITLAB_PROJECT_IDS }}
@@ -111,19 +119,21 @@ jobs:
steps:
- name: Check out the repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: Install Poetry
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- - name: Setup Python 3.10
- uses: actions/setup-python@v4.7.0
+ - name: Setup Python
+ uses: actions/setup-python@v4.7.1
with:
- python-version: '3.10'
+ python-version: ${{ env.NOXPYTHON }}
architecture: x64
cache: 'pip'
cache-dependency-path: 'poetry.lock'
@@ -139,13 +149,13 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
run: |
- nox -s tests -p 3.10 -- -m "external"
+ nox -- -m "external"
coverage:
name: Coverage
@@ -153,17 +163,21 @@ jobs:
needs: tests
steps:
- name: Check out the repository
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4.1.1
- name: Install Poetry
+ env:
+ PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
+ pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- name: Set up Python
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v4.7.1
with:
- python-version: '3.10'
+ python-version: '3.11'
cache: 'pip'
cache-dependency-path: 'poetry.lock'
@@ -181,8 +195,8 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Combine coverage data and display human readable report
diff --git a/.github/workflows/version_bump.yml b/.github/workflows/version_bump.yml
index 7503b030fa..0ba2e9044a 100644
--- a/.github/workflows/version_bump.yml
+++ b/.github/workflows/version_bump.yml
@@ -35,19 +35,19 @@ jobs:
pull-requests: write # to create and update PRs
steps:
- - uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4.1.1
with:
fetch-depth: 0
- name: Set up Python
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v4.7.1
with:
- python-version: "3.10"
+ python-version: "3.11"
architecture: x64
- name: Bump version
id: cz-bump
- uses: commitizen-tools/commitizen-action@0.18.2
+ uses: commitizen-tools/commitizen-action@0.20.0
with:
increment: ${{ github.event.inputs.bump != 'auto' && github.event.inputs.bump || '' }}
prerelease: ${{ github.event.inputs.prerelease != 'none' && github.event.inputs.prerelease || '' }}
@@ -66,8 +66,7 @@ jobs:
body_path: _changelog_fragment.md
tag_name: v${{ steps.cz-bump.outputs.version }}
prerelease: ${{ github.event.inputs.prerelease != 'none' }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ token: ${{ secrets.MELTYBOT_GITHUB_AUTH_TOKEN }}
- name: Set repo file permissions
run: |
@@ -77,6 +76,7 @@ jobs:
uses: peter-evans/create-pull-request@v5
id: create-pull-request
with:
+ token: ${{ secrets.MELTYBOT_GITHUB_AUTH_TOKEN }}
commit-message: "chore: Bump package version"
title: "chore: Release v${{ steps.cz-bump.outputs.version }}"
body: |
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3c8a18afe2..9975c91a06 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -17,7 +17,9 @@ repos:
exclude: |
(?x)^(
cookiecutter/.*/meltano.yml|
- cookiecutter/.*/.pre-commit-config.yaml
+ cookiecutter/.*/.pre-commit-config.yaml|
+ cookiecutter/.*/dependabot.yml|
+ cookiecutter/.*/test.yml
)$
- id: end-of-file-fixer
exclude: |
@@ -36,14 +38,14 @@ repos:
)$
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.3
+ rev: 0.27.2
hooks:
- id: check-dependabot
- id: check-github-workflows
- id: check-readthedocs
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.0.278
+ rev: v0.1.6
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
@@ -51,20 +53,14 @@ repos:
(?x)^(
cookiecutter/.*
)$
-
-- repo: https://github.com/psf/black
- rev: 23.7.0
- hooks:
- - id: black
+ - id: ruff-format
exclude: |
- (?x)^(
- cookiecutter/.*|
- singer_sdk/helpers/_simpleeval.py|
- tests/core/test_simpleeval.py
- )$
+ (?x)^(
+ cookiecutter/.*
+ )$
- repo: https://github.com/pycqa/flake8
- rev: 6.0.0
+ rev: 6.1.0
hooks:
- id: flake8
additional_dependencies:
@@ -76,6 +72,6 @@ repos:
)$
- repo: https://github.com/python-poetry/poetry
- rev: 1.5.0
+ rev: 1.7.0
hooks:
- id: poetry-check
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 56198b5973..0c8bf24feb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,96 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## v0.33.1 (2023-11-08)
+
+### 🐛 Fixes
+
+- [#2035](https://github.com/meltano/sdk/issues/2035) Retry all 5xx status codes -- _**Thanks @asamasoma!**_
+
+## v0.33.0 (2023-10-12)
+
+### ✨ New
+
+- [#1999](https://github.com/meltano/sdk/issues/1999) Log JSONPath match count at the INFO level
+- [#1779](https://github.com/meltano/sdk/issues/1779) Cache SQL columns and schemas
+- [#2003](https://github.com/meltano/sdk/issues/2003) Add ability to do list comprehensions in stream map expressions -- _**Thanks @haleemur!**_
+- [#2018](https://github.com/meltano/sdk/issues/2018) Drop Python 3.7 support in cookiecutter templates -- _**Thanks @visch!**_
+
+### 🐛 Fixes
+
+- [#2006](https://github.com/meltano/sdk/issues/2006) Parse record `time_extracted` into `datetime.datetime` instance
+- [#1996](https://github.com/meltano/sdk/issues/1996) Respect nullability of leaf properties when flattening schema
+- [#1844](https://github.com/meltano/sdk/issues/1844) Safely skip parsing record field as date-time if it is missing in schema
+- [#1885](https://github.com/meltano/sdk/issues/1885) Map `record` field to a JSON `object` type
+- [#2015](https://github.com/meltano/sdk/issues/2015) Ensure `default` property is passed to SCHEMA messages -- _**Thanks @prakharcode!**_
+
+### 📚 Documentation Improvements
+
+- [#2017](https://github.com/meltano/sdk/issues/2017) Document support for comprehensions in stream maps
+
+## v0.32.0 (2023-09-22)
+
+### ✨ New
+
+- [#1893](https://github.com/meltano/sdk/issues/1893) Standard configurable load methods
+- [#1861](https://github.com/meltano/sdk/issues/1861) SQLTap connector instance shared with streams -- _**Thanks @BuzzCutNorman!**_
+
+### 🐛 Fixes
+
+- [#1954](https://github.com/meltano/sdk/issues/1954) Missing begin()s related to SQLAlchemy 2.0 -- _**Thanks @andyoneal!**_
+- [#1951](https://github.com/meltano/sdk/issues/1951) Ensure SQL streams are sorted when a replication key is set
+- [#1949](https://github.com/meltano/sdk/issues/1949) Retry SQLAlchemy engine creation for adapters without JSON SerDe support
+- [#1939](https://github.com/meltano/sdk/issues/1939) Handle `decimal.Decimal` instances in flattening
+- [#1927](https://github.com/meltano/sdk/issues/1927) Handle replication key not found in stream schema -- _**Thanks @mjsqu!**_
+- [#1977](https://github.com/meltano/sdk/issues/1977) Fix hanging downstream tests in tap-postgres
+- [#1970](https://github.com/meltano/sdk/issues/1970) Warn instead of crashing when schema helpers cannot append `null` to types
+
+### ⚡ Performance Improvements
+
+- [#1925](https://github.com/meltano/sdk/issues/1925) Add viztracer command for testing targets -- _**Thanks @mjsqu!**_
+
+- [#1962](https://github.com/meltano/sdk/issues/1962) Ensure `raw_schema` in stream mapper is immutable
+
+## v0.31.1 (2023-08-17)
+
+### ✨ New
+
+- [#1905](https://github.com/meltano/sdk/issues/1905) Add email field and use human-readable questions in templates
+
+### 🐛 Fixes
+
+- [#1913](https://github.com/meltano/sdk/issues/1913) Fix tap tests for multiple test classes with different input catalogs
+
+## v0.31.0 (2023-08-07)
+
+### ✨ New
+
+- [#1892](https://github.com/meltano/sdk/issues/1892) Add a mapper cookiecutter template
+- [#1864](https://github.com/meltano/sdk/issues/1864) SQLTarget connector instance shared with sinks -- _**Thanks @BuzzCutNorman!**_
+- [#1878](https://github.com/meltano/sdk/issues/1878) Add `_sdc_sync_started_at` metadata column to indicate the start of the target process
+- [#1484](https://github.com/meltano/sdk/issues/1484) Bump latest supported sqlalchemy from `1.*` to `2.*`
+
+### 🐛 Fixes
+
+- [#1898](https://github.com/meltano/sdk/issues/1898) Correctly serialize `decimal.Decimal` in JSON fields of SQL targets
+- [#1881](https://github.com/meltano/sdk/issues/1881) Expose `add_record_metadata` as a builtin target setting
+- [#1880](https://github.com/meltano/sdk/issues/1880) Append batch config if target supports the batch capability
+- [#1865](https://github.com/meltano/sdk/issues/1865) Handle missing record properties in SQL sinks
+- [#1838](https://github.com/meltano/sdk/issues/1838) Add deprecation warning when importing legacy testing helpers
+- [#1842](https://github.com/meltano/sdk/issues/1842) Ensure all expected tap parameters are passed to `SQLTap` initializer
+- [#1853](https://github.com/meltano/sdk/issues/1853) Check against the unconformed key properties when validating record keys
+- [#1843](https://github.com/meltano/sdk/issues/1843) Target template should not reference `tap_id`
+- [#1708](https://github.com/meltano/sdk/issues/1708) Finalize and write last state message with dedupe
+- [#1835](https://github.com/meltano/sdk/issues/1835) Avoid setting up mapper in discovery mode
+
+### ⚙️ Under the Hood
+
+- [#1877](https://github.com/meltano/sdk/issues/1877) Use `importlib.resources` instead of `__file__` to retrieve sample Singer output files
+
+### 📚 Documentation Improvements
+
+- [#1852](https://github.com/meltano/sdk/issues/1852) Fix stale `pip_url` example that uses shell script workaround for editable installation
+
## v0.30.0 (2023-07-10)
### ✨ New
diff --git a/cookiecutter/mapper-template/README.md b/cookiecutter/mapper-template/README.md
new file mode 100644
index 0000000000..70e2e47e84
--- /dev/null
+++ b/cookiecutter/mapper-template/README.md
@@ -0,0 +1,24 @@
+# Singer Mapper Template
+
+To use this cookie cutter template:
+
+```bash
+pip3 install pipx
+pipx ensurepath
+# You may need to reopen your shell at this point
+pipx install cookiecutter
+```
+
+Initialize Cookiecutter template directly from Git:
+
+```bash
+cookiecutter https://github.com/meltano/sdk --directory="cookiecutter/mapper-template"
+```
+
+Or locally from an already-cloned `sdk` repo:
+
+```bash
+cookiecutter ./sdk/cookiecutter/mapper-template
+```
+
+See the [dev guide](https://sdk.meltano.com/en/latest/dev_guide.html).
diff --git a/cookiecutter/mapper-template/cookiecutter.json b/cookiecutter/mapper-template/cookiecutter.json
new file mode 100644
index 0000000000..c42b1cf06f
--- /dev/null
+++ b/cookiecutter/mapper-template/cookiecutter.json
@@ -0,0 +1,19 @@
+{
+ "name": "MyMapperName",
+ "admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
+ "mapper_id": "mapper-{{ cookiecutter.name.lower() }}",
+ "library_name": "{{ cookiecutter.mapper_id.replace('-', '_') }}",
+ "variant": "None (Skip)",
+ "include_ci_files": ["GitHub", "None (Skip)"],
+ "license": ["Apache-2.0"],
+ "__prompts__": {
+ "name": "The name of the mapper, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+ "mapper_id": "The ID of the mapper, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+ "include_ci_files": "Whether to include CI files for common CI services",
+ "license": "The license for the project"
+ }
+}
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
new file mode 100644
index 0000000000..0cfc81005d
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
@@ -0,0 +1,30 @@
+### A CI workflow template that runs linting and python testing
+### TODO: Modify as needed or as desired.
+
+name: Test {{cookiecutter.mapper_id}}
+
+on: [push]
+
+jobs:
+ pytest:
+ runs-on: ubuntu-latest
+ env:
+ GITHUB_TOKEN: {{ '${{secrets.GITHUB_TOKEN}}' }}
+ strategy:
+ matrix:
+ python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python {{ '${{ matrix.python-version }}' }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: {{ '${{ matrix.python-version }}' }}
+ - name: Install Poetry
+ run: |
+ pip install poetry
+ - name: Install dependencies
+ run: |
+ poetry install
+ - name: Test with pytest
+ run: |
+ poetry run pytest
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
rename to cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore
new file mode 100644
index 0000000000..475019c316
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore
@@ -0,0 +1,136 @@
+# Secrets and internal config files
+**/.secrets/*
+
+# Ignore meltano internal cache and sqlite systemdb
+
+.meltano/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
new file mode 100644
index 0000000000..0cff18a2f2
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
@@ -0,0 +1,32 @@
+ci:
+ autofix_prs: true
+ autoupdate_schedule: weekly
+ autoupdate_commit_msg: 'chore: pre-commit autoupdate'
+
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: check-json
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+
+- repo: https://github.com/python-jsonschema/check-jsonschema
+ rev: 0.27.1
+ hooks:
+ - id: check-dependabot
+ - id: check-github-workflows
+
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.6
+ hooks:
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix, --show-fixes]
+ - id: ruff-format
+
+- repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.7.0
+ hooks:
+ - id: mypy
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore
new file mode 100644
index 0000000000..33c6acd03e
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore
@@ -0,0 +1,10 @@
+# IMPORTANT! This folder is hidden from git - if you need to store config files or other secrets,
+# make sure those are never staged for commit into your git repo. You can store them here or another
+# secure location.
+#
+# Note: This may be redundant with the global .gitignore, and is provided
+# here for redundancy. If the `.secrets` folder is not needed, you may delete it
+# from the project.
+
+*
+!.gitignore
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md
new file mode 100644
index 0000000000..ded365fb2c
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md
@@ -0,0 +1,128 @@
+# {{ cookiecutter.mapper_id }}
+
+`{{ cookiecutter.mapper_id }}` is a Singer mapper for {{ cookiecutter.name }}.
+
+Built with the [Meltano Mapper SDK](https://sdk.meltano.com) for Singer Mappers.
+
+
+
+## Configuration
+
+### Accepted Config Options
+
+
+
+A full list of supported settings and capabilities for this
+mapper is available by running:
+
+```bash
+{{ cookiecutter.mapper_id }} --about
+```
+
+### Configure using environment variables
+
+This Singer mapper will automatically import any environment variables within the working directory's
+`.env` if the `--config=ENV` is provided, such that config values will be considered if a matching
+environment variable is set either in the terminal context or in the `.env` file.
+
+### Source Authentication and Authorization
+
+
+
+## Usage
+
+You can easily run `{{ cookiecutter.mapper_id }}` by itself or in a pipeline using [Meltano](https://meltano.com/).
+
+### Executing the Mapper Directly
+
+```bash
+{{ cookiecutter.mapper_id }} --version
+{{ cookiecutter.mapper_id }} --help
+```
+
+## Developer Resources
+
+Follow these instructions to contribute to this project.
+
+### Initialize your Development Environment
+
+```bash
+pipx install poetry
+poetry install
+```
+
+### Create and Run Tests
+
+Create tests within the `tests` subfolder and
+ then run:
+
+```bash
+poetry run pytest
+```
+
+You can also test the `{{cookiecutter.mapper_id}}` CLI interface directly using `poetry run`:
+
+```bash
+poetry run {{cookiecutter.mapper_id}} --help
+```
+
+### Testing with [Meltano](https://www.meltano.com)
+
+_**Note:** This mapper will work in any Singer environment and does not require Meltano.
+Examples here are for convenience and to streamline end-to-end orchestration scenarios._
+
+
+
+Next, install Meltano (if you haven't already) and any needed plugins:
+
+```bash
+# Install meltano
+pipx install meltano
+# Initialize meltano within this directory
+cd {{ cookiecutter.mapper_id }}
+meltano install
+```
+
+Now you can test and orchestrate using Meltano:
+
+```bash
+# Run a test `run` pipeline:
+meltano run tap-smoke-test {{ cookiecutter.mapper_id }} target-jsonl
+```
+
+### SDK Dev Guide
+
+See the [dev guide](https://sdk.meltano.com/en/latest/dev_guide.html) for more instructions on how to use the SDK to
+develop your own taps, targets, and mappers.
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml
new file mode 100644
index 0000000000..019015d06e
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml
@@ -0,0 +1,31 @@
+version: 1
+send_anonymous_usage_stats: true
+project_id: "{{cookiecutter.mapper_id}}"
+default_environment: test
+environments:
+- name: test
+plugins:
+ extractors:
+ - name: tap-smoke-test
+ variant: meltano
+ pip_url: git+https://github.com/meltano/tap-smoke-test.git
+ config:
+ streams:
+ - stream_name: animals
+ input_filename: https://raw.githubusercontent.com/meltano/tap-smoke-test/main/demo-data/animals-data.jsonl
+ loaders:
+ - name: target-jsonl
+ variant: andyh1203
+ pip_url: target-jsonl
+ mappers:
+ - name: "{{cookiecutter.mapper_id}}"
+ pip_url: -e .
+ namespace: "{{cookiecutter.library_name}}"
+ # TODO: replace these with the actual settings
+ settings:
+ - name: example_config
+ kind: string
+ mappings:
+ - name: example
+ config:
+ example_config: foo
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore
new file mode 100644
index 0000000000..80ff9d2a61
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore
@@ -0,0 +1,4 @@
+# This directory is used as a target by target-jsonl, so ignore all files
+
+*
+!.gitignore
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml
new file mode 100644
index 0000000000..deab56d7dc
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml
@@ -0,0 +1,67 @@
+[tool.poetry]
+{%- if cookiecutter.variant != "None (Skip)" %}
+name = "{{cookiecutter.variant}}-{{cookiecutter.mapper_id}}"
+{%- else %}
+name = "{{cookiecutter.mapper_id}}"
+{%- endif %}
+version = "0.0.1"
+description = "`{{cookiecutter.mapper_id}}` is a Singer mapper {{cookiecutter.name}}, built with the Meltano Singer SDK."
+readme = "README.md"
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
+keywords = [
+ "ELT",
+ "Mapper",
+ "{{cookiecutter.name}}",
+]
+license = "Apache-2.0"
+{%- if cookiecutter.variant != "None (Skip)" %}
+packages = [
+ { include = "{{cookiecutter.library_name}}" },
+]
+{%- endif %}
+
+[tool.poetry.dependencies]
+python = ">=3.8,<4"
+singer-sdk = { version="~=0.33.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
+
+[tool.poetry.group.dev.dependencies]
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.33.1", extras = ["testing"] }
+
+[tool.poetry.extras]
+s3 = ["fs-s3fs"]
+
+[tool.mypy]
+python_version = "3.9"
+warn_unused_configs = true
+
+[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py38"
+
+[tool.ruff.lint]
+ignore = [
+ "ANN101", # missing-type-self
+ "ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
+]
+select = ["ALL"]
+
+[tool.ruff.lint.flake8-annotations]
+allow-star-arg-any = true
+
+[tool.ruff.lint.isort]
+known-first-party = ["{{cookiecutter.library_name}}"]
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[build-system]
+requires = ["poetry-core>=1.0.8"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+# CLI declaration
+{{cookiecutter.mapper_id}} = '{{cookiecutter.library_name}}.mapper:{{cookiecutter.name}}Mapper.cli'
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py
new file mode 100644
index 0000000000..7caba56f78
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py
@@ -0,0 +1 @@
+"""Test suite for {{ cookiecutter.mapper_id }}."""
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py
new file mode 100644
index 0000000000..6bb3ec2d7a
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py
@@ -0,0 +1,3 @@
+"""Test Configuration."""
+
+pytest_plugins = ("singer_sdk.testing.pytest_plugin",)
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini
new file mode 100644
index 0000000000..70b9e4ac7e
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini
@@ -0,0 +1,19 @@
+# This file can be used to customize tox tests as well as other test frameworks like flake8 and mypy
+
+[tox]
+envlist = py38, py39, py310, py311
+isolated_build = true
+
+[testenv]
+allowlist_externals = poetry
+commands =
+ poetry install -v
+ poetry run pytest
+
+[testenv:pytest]
+# Run the python tests.
+# To execute, run `tox -e pytest`
+envlist = py38, py39, py310, py311
+commands =
+ poetry install -v
+ poetry run pytest
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
rename to cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py
new file mode 100644
index 0000000000..5781fbbc43
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py
@@ -0,0 +1 @@
+"""{{ cookiecutter.name }} Mapper."""
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py
new file mode 100644
index 0000000000..c8c3d23ec8
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py
@@ -0,0 +1,96 @@
+"""{{ cookiecutter.name }} mapper class."""
+
+from __future__ import annotations
+
+import typing as t
+from typing import TYPE_CHECKING
+
+import singer_sdk.typing as th
+from singer_sdk import _singerlib as singer
+from singer_sdk.mapper import PluginMapper
+from singer_sdk.mapper_base import InlineMapper
+
+if TYPE_CHECKING:
+ from pathlib import PurePath
+
+
+class {{ cookiecutter.name }}Mapper(InlineMapper):
+ """Sample mapper for {{ cookiecutter.name }}."""
+
+ name = "{{ cookiecutter.mapper_id }}"
+
+ config_jsonschema = th.PropertiesList(
+ # TODO: Replace or remove this example config based on your needs
+ th.Property(
+ "example_config",
+ th.StringType,
+ description="An example config, replace or remove based on your needs.",
+ ),
+ ).to_dict()
+
+ def __init__(
+ self,
+ *,
+ config: dict | PurePath | str | list[PurePath | str] | None = None,
+ parse_env_config: bool = False,
+ validate_config: bool = True,
+ ) -> None:
+ """Create a new inline mapper.
+
+ Args:
+ config: Mapper configuration. Can be a dictionary, a single path to a
+ configuration file, or a list of paths to multiple configuration
+ files.
+ parse_env_config: Whether to look for configuration values in environment
+ variables.
+ validate_config: True to require validation of config settings.
+ """
+ super().__init__(
+ config=config,
+ parse_env_config=parse_env_config,
+ validate_config=validate_config,
+ )
+
+ self.mapper = PluginMapper(plugin_config=dict(self.config), logger=self.logger)
+
+ def map_schema_message(self, message_dict: dict) -> t.Iterable[singer.Message]:
+ """Map a schema message to zero or more new messages.
+
+ Args:
+ message_dict: A SCHEMA message JSON dictionary.
+ """
+ yield singer.SchemaMessage.from_dict(message_dict)
+
+ def map_record_message(
+ self,
+ message_dict: dict,
+ ) -> t.Iterable[singer.RecordMessage]:
+ """Map a record message to zero or more new messages.
+
+ Args:
+ message_dict: A RECORD message JSON dictionary.
+ """
+ yield singer.RecordMessage.from_dict(message_dict)
+
+ def map_state_message(self, message_dict: dict) -> t.Iterable[singer.Message]:
+ """Map a state message to zero or more new messages.
+
+ Args:
+ message_dict: A STATE message JSON dictionary.
+ """
+ yield singer.StateMessage.from_dict(message_dict)
+
+ def map_activate_version_message(
+ self,
+ message_dict: dict,
+ ) -> t.Iterable[singer.Message]:
+ """Map a version message to zero or more new messages.
+
+ Args:
+ message_dict: An ACTIVATE_VERSION message JSON dictionary.
+ """
+ yield singer.ActivateVersionMessage.from_dict(message_dict)
+
+
+if __name__ == "__main__":
+ {{ cookiecutter.name }}Mapper.cli()
diff --git a/cookiecutter/tap-template/cookiecutter.json b/cookiecutter/tap-template/cookiecutter.json
index 8da7a099b1..017b311090 100644
--- a/cookiecutter/tap-template/cookiecutter.json
+++ b/cookiecutter/tap-template/cookiecutter.json
@@ -1,6 +1,7 @@
{
"source_name": "MySourceName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"tap_id": "tap-{{ cookiecutter.source_name.lower() }}",
"library_name": "{{ cookiecutter.tap_id.replace('-', '_') }}",
"variant": "None (Skip)",
@@ -13,6 +14,17 @@
"JWT",
"Custom or N/A"
],
- "include_ci_files": ["GitHub", "None (Skip)"],
- "license": ["Apache-2.0"]
+ "include_ci_files": ["GitHub", "None"],
+ "license": ["Apache-2.0", "None"],
+ "__prompts__": {
+ "source_name": "The name of the source, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+ "tap_id": "The ID of the tap, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+ "stream_type": "The type of stream the source provides",
+ "auth_method": "The [bold red]authentication[/] method used by the source, for REST and GraphQL sources",
+ "include_ci_files": "Whether to include CI files for common CI services",
+ "license": "The license for the project"
+ }
}
diff --git a/cookiecutter/tap-template/hooks/post_gen_project.py b/cookiecutter/tap-template/hooks/post_gen_project.py
new file mode 100644
index 0000000000..775a3e1ed0
--- /dev/null
+++ b/cookiecutter/tap-template/hooks/post_gen_project.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+from pathlib import Path
+import shutil
+
+
+PACKAGE_PATH = Path("{{cookiecutter.library_name}}")
+
+
+if __name__ == "__main__":
+ # Rename stream type client and delete others
+ target = PACKAGE_PATH / "client.py"
+ raw_client_py = PACKAGE_PATH / "{{cookiecutter.stream_type|lower}}-client.py"
+ raw_client_py.rename(target)
+
+ for client_py in PACKAGE_PATH.rglob("*-client.py"):
+ client_py.unlink()
+
+ if "{{ cookiecutter.auth_method }}" not in ("OAuth2", "JWT"):
+ PACKAGE_PATH.joinpath("auth.py").unlink()
+
+ if "{{ cookiecutter.stream_type }}" == "SQL":
+ PACKAGE_PATH.joinpath("streams.py").unlink()
+
+ if "{{ cookiecutter.license }}" == "None":
+ Path("LICENSE").unlink()
+
+ if "{{ cookiecutter.include_ci_files }}" != "GitHub":
+ shutil.rmtree(".github")
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
similarity index 100%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/test.yml
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/test.yml
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
index fe3a4d7ca2..1622d09994 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -14,24 +14,20 @@ repos:
- id: trailing-whitespace
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.0
+ rev: 0.27.1
hooks:
- id: check-dependabot
- id: check-github-workflows
-- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.0.269
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.6
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
-
-- repo: https://github.com/psf/black
- rev: 23.3.0
- hooks:
- - id: black
+ - id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.3.0
+ rev: v1.7.0
hooks:
- id: mypy
additional_dependencies:
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE
new file mode 100644
index 0000000000..62913ff3af
--- /dev/null
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+
+ Copyright {% now 'utc', '%Y' %} {{ cookiecutter.admin_name }}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
index 3eaed335bd..d41eacb98c 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
@@ -7,7 +7,7 @@ name = "{{cookiecutter.tap_id}}"
version = "0.0.1"
description = "`{{cookiecutter.tap_id}}` is a Singer tap for {{cookiecutter.source_name}}, built with the Meltano Singer SDK."
readme = "README.md"
-authors = ["{{ cookiecutter.admin_name }}"]
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
keywords = [
"ELT",
"{{cookiecutter.source_name}}",
@@ -20,19 +20,19 @@ packages = [
{%- endif %}
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-singer-sdk = { version="^0.30.0" }
-fs-s3fs = { version = "^1.1.1", optional = true }
+python = ">=3.8,<4"
+singer-sdk = { version="~=0.33.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
{%- if cookiecutter.stream_type in ["REST", "GraphQL"] %}
-requests = "^2.31.0"
+requests = "~=2.31.0"
{%- endif %}
{%- if cookiecutter.auth_method in ("OAuth2", "JWT") %}
-cached-property = "^1" # Remove after Python 3.7 support is dropped
+cached-property = "~=1.0" # Remove after Python 3.7 support is dropped
{%- endif %}
[tool.poetry.group.dev.dependencies]
-pytest = "^7.2.1"
-singer-sdk = { version="^0.30.0", extras = ["testing"] }
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.33.1", extras = ["testing"] }
[tool.poetry.extras]
s3 = ["fs-s3fs"]
@@ -45,22 +45,25 @@ plugins = "sqlmypy"
{%- endif %}
[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py38"
+
+[tool.ruff.lint]
ignore = [
"ANN101", # missing-type-self
"ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
select = ["ALL"]
-src = ["{{cookiecutter.library_name}}"]
-target-version = "py37"
-
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["{{cookiecutter.library_name}}"]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
[build-system]
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/test_core.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/test_core.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/auth.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/auth.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
similarity index 98%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
index f777e6d008..dae2269dff 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%}
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
@@ -159,7 +159,7 @@ def get_new_paginator(self) -> BaseAPIPaginator:
def get_url_params(
self,
context: dict | None, # noqa: ARG002
- next_page_token: Any | None,
+ next_page_token: Any | None, # noqa: ANN401
) -> dict[str, Any]:
"""Return a dictionary of values to be used in URL parameterization.
@@ -181,7 +181,7 @@ def get_url_params(
def prepare_request_payload(
self,
context: dict | None, # noqa: ARG002
- next_page_token: Any | None, # noqa: ARG002
+ next_page_token: Any | None, # noqa: ARG002, ANN401
) -> dict | None:
"""Prepare the data payload for the REST API request.
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/sql-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/sql-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
similarity index 95%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
index 4200179509..8272cbc24a 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%}
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import typing as t
from pathlib import Path
from singer_sdk import typing as th # JSON Schema typing helpers
@@ -54,7 +55,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream):
),
),
).to_dict()
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = None
graphql_query = """
users {
@@ -81,7 +82,7 @@ class GroupsStream({{ cookiecutter.source_name }}Stream):
th.Property("id", th.StringType),
th.Property("modified", th.DateTimeType),
).to_dict()
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = "modified"
graphql_query = """
groups {
@@ -104,7 +105,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream):
{%- if cookiecutter.stream_type == "REST" %}
path = "/users"
{%- endif %}
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = None
# Optionally, you may also use `schema_filepath` in place of `schema`:
# schema_filepath = SCHEMAS_DIR / "users.json" # noqa: ERA001
@@ -143,7 +144,7 @@ class GroupsStream({{ cookiecutter.source_name }}Stream):
{%- if cookiecutter.stream_type == "REST" %}
path = "/groups"
{%- endif %}
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = "modified"
schema = th.PropertiesList(
th.Property("name", th.StringType),
diff --git a/cookiecutter/target-template/cookiecutter.json b/cookiecutter/target-template/cookiecutter.json
index 4816b54aa2..c7c31835ab 100644
--- a/cookiecutter/target-template/cookiecutter.json
+++ b/cookiecutter/target-template/cookiecutter.json
@@ -1,10 +1,21 @@
{
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-{{ cookiecutter.destination_name.lower() }}",
"library_name": "{{ cookiecutter.target_id.replace('-', '_') }}",
"variant": "None (Skip)",
"serialization_method": ["Per record", "Per batch", "SQL"],
"include_ci_files": ["GitHub", "None (Skip)"],
- "license": ["Apache-2.0"]
+ "license": ["Apache-2.0"],
+ "__prompts__": {
+    "destination_name": "The name of the destination, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+    "target_id": "The ID of the target, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+ "serialization_method": "The serialization method to use for loading data",
+    "include_ci_files": "Whether to include CI files for a common CI service",
+ "license": "The license for the project"
+ }
}
diff --git a/cookiecutter/target-template/hooks/post_gen_project.py b/cookiecutter/target-template/hooks/post_gen_project.py
new file mode 100644
index 0000000000..44edd337bb
--- /dev/null
+++ b/cookiecutter/target-template/hooks/post_gen_project.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+from pathlib import Path
+import shutil
+
+
+BASE_PATH = Path("{{cookiecutter.library_name}}")
+
+
+if __name__ == "__main__":
+ if "{{ cookiecutter.license }}" != "Apache-2.0":
+ Path("LICENSE").unlink()
+
+ if "{{ cookiecutter.include_ci_files }}" != "GitHub":
+ shutil.rmtree(Path(".github"))
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
new file mode 100644
index 0000000000..933e6b1c26
--- /dev/null
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
@@ -0,0 +1,26 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: pip
+ directory: "/"
+ schedule:
+ interval: "daily"
+ commit-message:
+ prefix: "chore(deps): "
+ prefix-development: "chore(deps-dev): "
+ - package-ecosystem: pip
+ directory: "/.github/workflows"
+ schedule:
+ interval: daily
+ commit-message:
+ prefix: "ci: "
+ - package-ecosystem: github-actions
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ commit-message:
+ prefix: "ci: "
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/test.yml
similarity index 100%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
rename to cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/test.yml
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
index 8d4c83feae..3f4967db27 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -14,24 +14,20 @@ repos:
- id: trailing-whitespace
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.0
+ rev: 0.27.1
hooks:
- id: check-dependabot
- id: check-github-workflows
-- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.0.269
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.6
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
-
-- repo: https://github.com/psf/black
- rev: 23.3.0
- hooks:
- - id: black
+ - id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.3.0
+ rev: v1.7.0
hooks:
- id: mypy
additional_dependencies:
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%} b/cookiecutter/target-template/{{cookiecutter.target_id}}/LICENSE
similarity index 100%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
rename to cookiecutter/target-template/{{cookiecutter.target_id}}/LICENSE
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
index b612331112..810ce46201 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
@@ -7,7 +7,7 @@ name = "{{cookiecutter.target_id}}"
version = "0.0.1"
description = "`{{cookiecutter.target_id}}` is a Singer target for {{cookiecutter.destination_name}}, built with the Meltano Singer SDK."
readme = "README.md"
-authors = ["{{ cookiecutter.admin_name }}"]
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
keywords = [
"ELT",
"{{cookiecutter.destination_name}}",
@@ -20,36 +20,40 @@ packages = [
{%- endif %}
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-singer-sdk = { version="^0.30.0" }
-fs-s3fs = { version = "^1.1.1", optional = true }
+python = ">=3.8,<4"
+singer-sdk = { version="~=0.33.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
{%- if cookiecutter.serialization_method != "SQL" %}
-requests = "^2.31.0"
+requests = "~=2.31.0"
{%- endif %}
[tool.poetry.dev-dependencies]
-pytest = "^7.2.1"
-singer-sdk = { version="^0.30.0", extras = ["testing"] }
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.33.1", extras = ["testing"] }
[tool.poetry.extras]
s3 = ["fs-s3fs"]
[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py37"
+
+[tool.ruff.lint]
ignore = [
"ANN101", # missing-type-self
"ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
select = ["ALL"]
-src = ["{{cookiecutter.library_name}}"]
-target-version = "py37"
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["{{cookiecutter.library_name}}"]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
[build-system]
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
index 2403b2a3a0..6db9a50419 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
@@ -20,7 +20,7 @@
)
-class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): # type: ignore[misc, valid-type] # noqa: E501
+class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): # type: ignore[misc, valid-type]
"""Standard Target Tests."""
@pytest.fixture(scope="class")
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
index 4e84d12844..9edd13a116 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
@@ -35,6 +35,7 @@ class {{ cookiecutter.destination_name }}Connector(SQLConnector):
allow_column_rename: bool = True # Whether RENAME COLUMN is supported.
allow_column_alter: bool = False # Whether altering column types is supported.
allow_merge_upsert: bool = False # Whether MERGE UPSERT is supported.
+ allow_overwrite: bool = False # Whether overwrite load method is supported.
allow_temp_tables: bool = True # Whether temp tables are supported.
def get_sqlalchemy_url(self, config: dict) -> str:
diff --git a/docs/conf.py b/docs/conf.py
index 61ac4b0717..3ce7e00342 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -26,7 +26,7 @@
author = "Meltano Core Team and Contributors"
# The full version, including alpha/beta/rc tags
-release = "0.30.0"
+release = "0.33.1"
# -- General configuration ---------------------------------------------------
@@ -43,6 +43,7 @@
"myst_parser",
"sphinx_reredirects",
"sphinx_inline_tabs",
+ "notfound.extension",
]
# Add any paths that contain templates here, relative to this directory.
@@ -70,7 +71,6 @@
"source_branch": "main",
"source_directory": "docs/",
"sidebar_hide_name": True,
- "announcement": 'Sign up for Public Beta today! Get a 20% discount on purchases before 27th of July!', # noqa: E501
# branding
"light_css_variables": {
"font-stack": "Hanken Grotesk,-apple-system,Helvetica,sans-serif",
diff --git a/docs/deprecation.md b/docs/deprecation.md
index 089355fd98..b27e61b2b8 100644
--- a/docs/deprecation.md
+++ b/docs/deprecation.md
@@ -11,3 +11,5 @@ incompatible way, following their deprecation, as indicated in the
[`RESTStream.get_new_paginator`](singer_sdk.RESTStream.get_new_paginator).
See the [migration guide](./guides/pagination-classes.md) for more information.
+
+- The `singer_sdk.testing.get_standard_tap_tests` and `singer_sdk.testing.get_standard_target_tests` functions will be removed. Replace them with the `singer_sdk.testing.get_tap_test_class` and `singer_sdk.testing.get_target_test_class` functions, respectively, to generate a richer test suite.
diff --git a/docs/dev_guide.md b/docs/dev_guide.md
index b93eacedd3..316cca8ad0 100644
--- a/docs/dev_guide.md
+++ b/docs/dev_guide.md
@@ -76,6 +76,24 @@ generated `README.md` file to complete your new tap or target. You can also refe
[Meltano Tutorial](https://docs.meltano.com/tutorials/custom-extractor) for a more
detailed guide.
+````{admonition} Avoid repeating yourself
+ If you find yourself repeating the same inputs to the cookiecutter, you can create a
+ `cookiecutterrc` file in your home directory to set default values for the prompts.
+
+ For example, if you want to set the default value for your name and email, and the
+ default stream type and authentication method, you can add the following to your
+ `~/.cookiecutterrc` file:
+
+ ```yaml
+ # ~/.cookiecutterrc
+ default_context:
+ admin_name: Johnny B. Goode
+ admin_email: jbg@example.com
+ stream_type: REST
+ auth_method: Bearer Token
+ ```
+````
+
### Using an existing library
In some cases, there may already be a library that connects to the API and all you need the SDK for
@@ -239,13 +257,14 @@ We've had success using [`viztracer`](https://github.com/gaogaotiantian/viztrace
You can start doing the same in your package. Start by installing `viztracer`.
```console
-$ poetry add --dev viztracer
+$ poetry add --group dev viztracer
```
Then simply run your package's CLI as normal, preceded by the `viztracer` command
```console
$ poetry run viztracer my-tap
+$ poetry run viztracer -- my-target --config=config.json --input=messages.json
```
That command will produce a `result.json` file which you can explore with the `vizviewer` tool.
diff --git a/docs/stream_maps.md b/docs/stream_maps.md
index e4119d6400..8d84d9ea09 100644
--- a/docs/stream_maps.md
+++ b/docs/stream_maps.md
@@ -47,6 +47,24 @@ three distinct fields:
- `user__last_name`
- `user__id`
+#### Flattening Example
+
+````{tab} meltano.yml
+```yaml
+flattening_enabled: true
+flattening_max_depth: 1 # flatten only top-level properties
+```
+````
+
+````{tab} JSON
+```json
+{
+ "flattening_enabled": true,
+ "flattening_max_depth": 1
+}
+```
+````
+
## Out-of-scope capabilities
These capabilities are all out of scope _by design_:
@@ -155,6 +173,32 @@ Expressions are defined and parsed using the
accepts most native python expressions and is extended by custom functions which have been declared
within the SDK.
+#### Compound Expressions
+
+Starting in version 0.33.0, the SDK supports the use of simple comprehensions, e.g. `[x + 1 for x in [1,2,3]]`. This is a powerful feature which allows you to perform complex transformations on lists of values. For example, you can use comprehensions to filter out values in an array:
+
+````{tab} meltano.yml
+```yaml
+stream_maps:
+ users:
+ id: id
+ fields: "[f for f in fields if f['key'] != 'age']"
+```
+````
+
+````{tab} JSON
+```json
+{
+ "stream_maps": {
+ "users": {
+ "id": "id",
+ "fields": "[f for f in fields if f['key'] != 'age']"
+ }
+ }
+}
+```
+````
+
### Accessing Stream Properties within Mapping Expressions
By default, all stream properties are made available via the property's given name. For
diff --git a/e2e-tests/cookiecutters/mapper-base.json b/e2e-tests/cookiecutters/mapper-base.json
new file mode 100644
index 0000000000..390e8a7bad
--- /dev/null
+++ b/e2e-tests/cookiecutters/mapper-base.json
@@ -0,0 +1,14 @@
+{
+ "cookiecutter": {
+ "name": "MyMapperName",
+ "admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
+ "mapper_id": "mapper-base",
+ "library_name": "mapper_base",
+ "variant": "None (Skip)",
+ "include_ci_files": "None (Skip)",
+ "license": "Apache-2.0",
+ "_template": "../mapper-template/",
+ "_output_dir": "."
+ }
+}
diff --git a/e2e-tests/cookiecutters/tap-graphql-jwt.json b/e2e-tests/cookiecutters/tap-graphql-jwt.json
index 0c322e06f2..5daf4ab8fb 100644
--- a/e2e-tests/cookiecutters/tap-graphql-jwt.json
+++ b/e2e-tests/cookiecutters/tap-graphql-jwt.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "GraphQLJWTTemplateTest",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-graphql-jwt",
"library_name": "tap_graphql_jwt",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-other-custom.json b/e2e-tests/cookiecutters/tap-other-custom.json
index ac3816774e..3ea01eaf4c 100644
--- a/e2e-tests/cookiecutters/tap-other-custom.json
+++ b/e2e-tests/cookiecutters/tap-other-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-other-custom",
"library_name": "tap_other_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-api_key-github.json b/e2e-tests/cookiecutters/tap-rest-api_key-github.json
index e659819408..01570aba82 100644
--- a/e2e-tests/cookiecutters/tap-rest-api_key-github.json
+++ b/e2e-tests/cookiecutters/tap-rest-api_key-github.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-api_key-github",
"library_name": "tap_rest_api_key_github",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-basic_auth.json b/e2e-tests/cookiecutters/tap-rest-basic_auth.json
index 33eb7b625e..6c7d7fa190 100644
--- a/e2e-tests/cookiecutters/tap-rest-basic_auth.json
+++ b/e2e-tests/cookiecutters/tap-rest-basic_auth.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-basic_auth",
"library_name": "tap_rest_basic_auth",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-bearer_token.json b/e2e-tests/cookiecutters/tap-rest-bearer_token.json
index f506061dda..1574574629 100644
--- a/e2e-tests/cookiecutters/tap-rest-bearer_token.json
+++ b/e2e-tests/cookiecutters/tap-rest-bearer_token.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-bearer_token",
"library_name": "tap_rest_bearer_token",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-custom.json b/e2e-tests/cookiecutters/tap-rest-custom.json
index 5d68d60bf7..831135b7a7 100644
--- a/e2e-tests/cookiecutters/tap-rest-custom.json
+++ b/e2e-tests/cookiecutters/tap-rest-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-custom",
"library_name": "tap_rest_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-jwt.json b/e2e-tests/cookiecutters/tap-rest-jwt.json
index 80837f2441..b46807d491 100644
--- a/e2e-tests/cookiecutters/tap-rest-jwt.json
+++ b/e2e-tests/cookiecutters/tap-rest-jwt.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-jwt",
"library_name": "tap_rest_jwt",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-oauth2.json b/e2e-tests/cookiecutters/tap-rest-oauth2.json
index 27c7c39df5..4a41b80e3e 100644
--- a/e2e-tests/cookiecutters/tap-rest-oauth2.json
+++ b/e2e-tests/cookiecutters/tap-rest-oauth2.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-oauth2",
"library_name": "tap_rest_oauth2",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-sql-custom.json b/e2e-tests/cookiecutters/tap-sql-custom.json
index 96fa379d74..3c59968607 100644
--- a/e2e-tests/cookiecutters/tap-sql-custom.json
+++ b/e2e-tests/cookiecutters/tap-sql-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-sql-custom",
"library_name": "tap_sql_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/target-per_record.json b/e2e-tests/cookiecutters/target-per_record.json
index 9e0047af17..f5dde1cef0 100644
--- a/e2e-tests/cookiecutters/target-per_record.json
+++ b/e2e-tests/cookiecutters/target-per_record.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-per_record",
"library_name": "target_per_record",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/target-sql.json b/e2e-tests/cookiecutters/target-sql.json
index 5802e5edee..63691d7188 100644
--- a/e2e-tests/cookiecutters/target-sql.json
+++ b/e2e-tests/cookiecutters/target-sql.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-sql",
"library_name": "target_sql",
"variant": "None (Skip)",
diff --git a/noxfile.py b/noxfile.py
index 4c49494133..1a98cf8531 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import glob
import os
import shutil
import sys
@@ -23,28 +22,33 @@
RUFF_OVERRIDES = """\
extend = "./pyproject.toml"
-extend-ignore = ["TD002", "TD003"]
+extend-ignore = ["TD002", "TD003", "FIX002"]
"""
+COOKIECUTTER_REPLAY_FILES = list(Path("./e2e-tests/cookiecutters").glob("*.json"))
+
package = "singer_sdk"
python_versions = ["3.11", "3.10", "3.9", "3.8", "3.7"]
-main_python_version = "3.10"
+main_python_version = "3.11"
locations = "singer_sdk", "tests", "noxfile.py", "docs/conf.py"
nox.options.sessions = (
"mypy",
"tests",
+ "benches",
"doctest",
"test_cookiecutter",
)
test_dependencies = [
"coverage[toml]",
+ "duckdb",
+ "duckdb-engine",
"pytest",
- "pytest-snapshot",
+ "pytest-benchmark",
"pytest-durations",
- "freezegun",
- "pandas",
+ "pytest-snapshot",
"pyarrow",
"requests-mock",
+ "time-machine",
# Cookiecutter tests
"black",
"cookiecutter",
@@ -85,6 +89,14 @@ def tests(session: Session) -> None:
session.install(".[s3]")
session.install(*test_dependencies)
+ sqlalchemy_version = os.environ.get("SQLALCHEMY_VERSION")
+ if sqlalchemy_version:
+ # Bypass nox-poetry use of --constraint so we can install a version of
+ # SQLAlchemy that doesn't match what's in poetry.lock.
+ session.poetry.session.install( # type: ignore[attr-defined]
+ f"sqlalchemy=={sqlalchemy_version}",
+ )
+
try:
session.run(
"coverage",
@@ -94,16 +106,34 @@ def tests(session: Session) -> None:
"pytest",
"-v",
"--durations=10",
+ "--benchmark-skip",
*session.posargs,
- env={
- "SQLALCHEMY_WARN_20": "1",
- },
)
finally:
if session.interactive:
session.notify("coverage", posargs=[])
+@session(python=main_python_version)
+def benches(session: Session) -> None:
+ """Run benchmarks."""
+ session.install(".[s3]")
+ session.install(*test_dependencies)
+ sqlalchemy_version = os.environ.get("SQLALCHEMY_VERSION")
+ if sqlalchemy_version:
+ # Bypass nox-poetry use of --constraint so we can install a version of
+ # SQLAlchemy that doesn't match what's in poetry.lock.
+ session.poetry.session.install( # type: ignore[attr-defined]
+ f"sqlalchemy=={sqlalchemy_version}",
+ )
+ session.run(
+ "pytest",
+ "--benchmark-only",
+ "--benchmark-json=output.json",
+ *session.posargs,
+ )
+
+
@session(python=main_python_version)
def update_snapshots(session: Session) -> None:
"""Update pytest snapshots."""
@@ -180,36 +210,37 @@ def docs_serve(session: Session) -> None:
session.run("sphinx-autobuild", *args)
-@nox.parametrize("replay_file_path", glob.glob("./e2e-tests/cookiecutters/*.json"))
+@nox.parametrize("replay_file_path", COOKIECUTTER_REPLAY_FILES)
@session(python=main_python_version)
-def test_cookiecutter(session: Session, replay_file_path) -> None:
+def test_cookiecutter(session: Session, replay_file_path: str) -> None:
"""Uses the tap template to build an empty cookiecutter.
Runs the lint task on the created test project.
"""
- cc_build_path = tempfile.gettempdir()
- folder_base_path = "./cookiecutter"
-
- target_folder = (
- "tap-template"
- if Path(replay_file_path).name.startswith("tap")
- else "target-template"
- )
- tap_template = Path(folder_base_path + "/" + target_folder).resolve()
+ cc_build_path = Path(tempfile.gettempdir())
+ folder_base_path = Path("./cookiecutter")
replay_file = Path(replay_file_path).resolve()
- if not Path(tap_template).exists():
+ if replay_file.name.startswith("tap"):
+ folder = "tap-template"
+ elif replay_file.name.startswith("target"):
+ folder = "target-template"
+ else:
+ folder = "mapper-template"
+ template = folder_base_path.joinpath(folder).resolve()
+
+ if not template.exists():
return
- if not Path(replay_file).is_file():
+ if not replay_file.is_file():
return
- sdk_dir = Path(Path(tap_template).parent).parent
- cc_output_dir = Path(replay_file_path).name.replace(".json", "")
- cc_test_output = cc_build_path + "/" + cc_output_dir
+ sdk_dir = template.parent.parent
+ cc_output_dir = replay_file.name.replace(".json", "")
+ cc_test_output = cc_build_path.joinpath(cc_output_dir)
- if Path(cc_test_output).exists():
- session.run("rm", "-fr", cc_test_output, external=True)
+ if cc_test_output.exists():
+ session.run("rm", "-fr", str(cc_test_output), external=True)
session.install(".")
session.install("cookiecutter", "pythonsed")
@@ -218,9 +249,9 @@ def test_cookiecutter(session: Session, replay_file_path) -> None:
"cookiecutter",
"--replay-file",
str(replay_file),
- str(tap_template),
+ str(template),
"-o",
- cc_build_path,
+ str(cc_build_path),
)
session.chdir(cc_test_output)
@@ -241,3 +272,25 @@ def test_cookiecutter(session: Session, replay_file_path) -> None:
session.run("git", "init", external=True)
session.run("git", "add", ".", external=True)
session.run("pre-commit", "run", "--all-files", external=True)
+
+
+@session(name="version-bump")
+def version_bump(session: Session) -> None:
+ """Run commitizen."""
+ session.install(
+ "commitizen",
+ "commitizen-version-bump @ git+https://github.com/meltano/commitizen-version-bump.git@main",
+ )
+ default_args = [
+ "--changelog",
+ "--files-only",
+ "--check-consistency",
+ "--changelog-to-stdout",
+ ]
+ args = session.posargs or default_args
+
+ session.run(
+ "cz",
+ "bump",
+ *args,
+ )
diff --git a/poetry.lock b/poetry.lock
index 9334d66453..7ddfd6de23 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -22,23 +22,6 @@ files = [
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
-[[package]]
-name = "argcomplete"
-version = "3.0.8"
-description = "Bash tab completion for argparse"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "argcomplete-3.0.8-py3-none-any.whl", hash = "sha256:e36fd646839933cbec7941c662ecb65338248667358dd3d968405a4506a60d9b"},
- {file = "argcomplete-3.0.8.tar.gz", hash = "sha256:b9ca96448e14fa459d7450a4ab5a22bbf9cee4ba7adddf03e65c398b5daeea28"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=0.23,<7", markers = "python_version == \"3.7\""}
-
-[package.extras]
-test = ["coverage", "mypy", "pexpect", "ruff", "wheel"]
-
[[package]]
name = "arrow"
version = "1.2.3"
@@ -77,17 +60,21 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte
[[package]]
name = "babel"
-version = "2.12.1"
+version = "2.13.1"
description = "Internationalization utilities"
optional = true
python-versions = ">=3.7"
files = [
- {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
- {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
+ {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"},
+ {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"},
]
[package.dependencies]
pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
+setuptools = {version = "*", markers = "python_version >= \"3.12\""}
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "backoff"
@@ -100,6 +87,46 @@ files = [
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
]
+[[package]]
+name = "backports-datetime-fromisoformat"
+version = "2.0.1"
+description = "Backport of Python 3.11's datetime.fromisoformat"
+optional = false
+python-versions = ">3"
+files = [
+ {file = "backports-datetime-fromisoformat-2.0.1.tar.gz", hash = "sha256:1b6afca7f47019c22df43062cde73c1af65fbdebc66520f352c690d52fd27127"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b739ccd3f36244f618f1fbc21d89894d9dc9d1d75a68762fcf917d433df38ae3"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:afd072ca32f2ca4e838e0f7b61a56168d98837ee9a182c567a49a834e07c2b98"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a136d85f8b1db4747aa9e56a8caa0ba77c5c25b761b18e2169ea7b1b516f012"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d3a0579958ade7db62c8238163e05d46a4de61c99cebb40031ed7409a44d5f6"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:199df62af8feff5da0f4953fdc4a6994bcd7dbfe1db95901d8b93d05feda2ab5"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afe32e60a471831058ede14fc226d9f14120e6dc67d66fbbd36e1724826ad70b"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:a1ba7e226a9694b20b713867f71b5ed2f662603c39875f14f968608d331fc96a"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:403f155deecbe94d43d0679a74abb5c9ac441422a9ececcfde030fb133865659"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d2ee049997d3aa2e714489cb3c34864fb0f25786e7a4ff04ac9d82af58b453"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:20aa422769af9f72ca41d83238d4a3a008d6cd74bcff0a08befb11b0018d6aa5"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8ea8d85c3c9afa4ad51b6644d26516d43493f44c2131c12a2ba959433f4417f6"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:812b8c34e88a7d9615c604f1a0473a4e6d664aba94086bffb0c55627f9e3fb68"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:df5365930320b7a9d404cd6f7bc13988e28355e812aa42e21aa5c93443dcdd2e"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe3e3968c8dce4a44da2da81a6031e992a4ee62d130c2536696d215a4db2ce3c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a4abb678ab0d6a1965d70e21e424bcf7a52086a7afb1c5f13243a3d44fa2dd"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96b7e806ade09a91d8ce195c197fc799d8fbe6b8ea9cde21f8a01f1090e51e33"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:002a77bd4f549ff5e80f1ef4a9b69982746dd6190786b90abe3d9c69c9883ce4"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7b4ad0013a96b656ebf85079921ffb211623a1e28ff4981b3927690a2ed6df54"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:065421723e735ce8f68dbb4486f07562ce8556ed543ceaa012189b9aa209f303"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a4bf1bec08bc84095ee379202466c948fe12cff1442f58ee1a91fac4c5164c97"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1836aff09b8317e179cc7288856b61a450515d4b411f0ab723dc9692dfa5362e"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:815f85a581b142bcf34632c3ce26f7e21003f101ce88b5649631330e334bbe35"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a6986cfd3bc40b41465a6c54c18a30ca8110333d0b71f6062af136db11c8ff0"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:82741e732d71f78b44a8c3b95f33b3630e7bfbdb02e3fede3938cdf15d5b6a83"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eac27abb51ee84e08d1dd1e908c16cae2078c217ff5b54092e6cb92107b4c6c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3b730d72061523be9600bcd281ef353f7f73b1df095adbbdc364aac8f430c44c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e8f28f4a68539192473f427ed86794931502d186e2fffa1926250550c1335a"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef151f1df77e413dc179607edb5bee11949ca5890e81c0bb742d96fec753fe"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28c95d6df2a44fa3540e18e484596c03e8ff7112e2f93b664f482fe3a88720b"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91042b53de903e3725209ad6d69b6994ae4819614c0decd62d05dfea23f35e2b"},
+]
+
[[package]]
name = "beautifulsoup4"
version = "4.12.2"
@@ -132,103 +159,56 @@ files = [
[package.dependencies]
chardet = ">=3.0.2"
-[[package]]
-name = "black"
-version = "23.3.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
- {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
- {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
- {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
- {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
- {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
- {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
- {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
- {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
- {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
- {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
- {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
- {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
- {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
[[package]]
name = "boto3"
-version = "1.26.157"
+version = "1.29.6"
description = "The AWS SDK for Python"
optional = true
python-versions = ">= 3.7"
files = [
- {file = "boto3-1.26.157-py3-none-any.whl", hash = "sha256:718b236aafc3f106d17cd5c4f513fc2f40bfa995c0cb730ecc893e9c808c0385"},
- {file = "boto3-1.26.157.tar.gz", hash = "sha256:7a8117dfe9ba1f203d73b3df32a4ebdb895813189635f126fa256e1dea37ee8d"},
+ {file = "boto3-1.29.6-py3-none-any.whl", hash = "sha256:f4d19e01d176c3a5a05e4af733185ff1891b08a3c38d4a439800fa132aa6e9be"},
+ {file = "boto3-1.29.6.tar.gz", hash = "sha256:d1d0d979a70bf9b0b13ae3b017f8523708ad953f62d16f39a602d67ee9b25554"},
]
[package.dependencies]
-botocore = ">=1.29.157,<1.30.0"
+botocore = ">=1.32.6,<1.33.0"
jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.6.0,<0.7.0"
+s3transfer = ">=0.7.0,<0.8.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.29.157"
+version = "1.32.6"
description = "Low-level, data-driven core of boto 3."
optional = true
python-versions = ">= 3.7"
files = [
- {file = "botocore-1.29.157-py3-none-any.whl", hash = "sha256:ccbf948c040d68b6a22570e73dd63cb3b07ce33f4032e9b1d502d2fae55c3b80"},
- {file = "botocore-1.29.157.tar.gz", hash = "sha256:af2a7b6417bf3bbf00ab22aa61a2d7d839a8a8a62e7975c18c80c55c88dc7fcf"},
+ {file = "botocore-1.32.6-py3-none-any.whl", hash = "sha256:4454f967a4d1a01e3e6205c070455bc4e8fd53b5b0753221581ae679c55a9dfd"},
+ {file = "botocore-1.32.6.tar.gz", hash = "sha256:ecec876103783b5efe6099762dda60c2af67e45f7c0ab4568e8265d11c6c449b"},
]
[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
-urllib3 = ">=1.25.4,<1.27"
+urllib3 = [
+ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""},
+ {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""},
+]
[package.extras]
-crt = ["awscrt (==0.16.9)"]
+crt = ["awscrt (==0.19.12)"]
[[package]]
name = "certifi"
-version = "2023.5.7"
+version = "2023.11.17"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
- {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
+ {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
+ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
]
[[package]]
@@ -309,108 +289,123 @@ pycparser = "*"
[[package]]
name = "chardet"
-version = "5.1.0"
+version = "5.2.0"
description = "Universal encoding detector for Python 3"
optional = false
python-versions = ">=3.7"
files = [
- {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
- {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
+ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"},
+ {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
]
[[package]]
name = "charset-normalizer"
-version = "3.1.0"
+version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
- {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "click"
-version = "8.1.6"
+version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"},
- {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"},
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[package.dependencies]
@@ -428,59 +423,15 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-[[package]]
-name = "commitizen"
-version = "3.4.0"
-description = "Python commitizen client tool"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "commitizen-3.4.0-py3-none-any.whl", hash = "sha256:5c58052099a6512da66a893f09e98e1f0d94ed02720a4e8d5747d4d409d59cfb"},
- {file = "commitizen-3.4.0.tar.gz", hash = "sha256:ab17db8c4f7258d9cdcc620046aa63d2139756ef78b2174cfa9f9c5e383eaf27"},
-]
-
-[package.dependencies]
-argcomplete = ">=1.12.1,<3.1"
-charset-normalizer = ">=2.1.0,<4"
-colorama = ">=0.4.1,<0.5.0"
-decli = ">=0.6.0,<0.7.0"
-importlib_metadata = ">=4.13,<7"
-jinja2 = ">=2.10.3"
-packaging = ">=19"
-pyyaml = ">=3.08"
-questionary = ">=1.4.0,<2.0.0"
-termcolor = ">=1.1,<3"
-tomlkit = ">=0.5.3,<1.0.0"
-typing-extensions = {version = ">=4.0.1,<5.0.0", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "commitizen-version-bump"
-version = "0.1.0"
-description = "Commitizen customized for Meltano projects (https://commitizen-tools.github.io/commitizen/customization)"
-optional = false
-python-versions = "^3.7"
-files = []
-develop = false
-
-[package.dependencies]
-commitizen = ">=3.0.0,<4.0.0"
-PyGithub = "^1.57"
-
-[package.source]
-type = "git"
-url = "https://github.com/meltano/commitizen-version-bump.git"
-reference = "main"
-resolved_reference = "e2e6d5d13d39eae1f37e3a275c0d3d3e38c18439"
-
[[package]]
name = "cookiecutter"
-version = "2.2.2"
+version = "2.5.0"
description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cookiecutter-2.2.2-py3-none-any.whl", hash = "sha256:4feb7485520dd7453e3094d8f3955601156a0fab0d0b90a2c8c74f6dc2cbaac6"},
- {file = "cookiecutter-2.2.2.tar.gz", hash = "sha256:3b475d17573a7785b4a22fab693be249840e235a92c93c0fa088b39e9193f194"},
+ {file = "cookiecutter-2.5.0-py3-none-any.whl", hash = "sha256:8aa2f12ed11bc05628651e9dc4353a10571dd9908aaaaeec959a2b9ea465a5d2"},
+ {file = "cookiecutter-2.5.0.tar.gz", hash = "sha256:e61e9034748e3f41b8bd2c11f00d030784b48711c4d5c42363c50989a65331ec"},
]
[package.dependencies]
@@ -491,6 +442,7 @@ Jinja2 = ">=2.7,<4.0.0"
python-slugify = ">=4.0.0"
pyyaml = ">=5.3.1"
requests = ">=2.23.0"
+rich = "*"
[[package]]
name = "coverage"
@@ -569,34 +521,34 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
-version = "41.0.2"
+version = "41.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"},
- {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"},
- {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"},
- {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"},
- {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"},
- {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"},
- {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"},
+ {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"},
+ {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"},
+ {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"},
+ {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"},
+ {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"},
+ {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"},
+ {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"},
]
[package.dependencies]
@@ -612,56 +564,6 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
-[[package]]
-name = "darglint"
-version = "1.8.1"
-description = "A utility for ensuring Google-style docstrings stay up to date with the source code."
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"},
- {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
-]
-
-[[package]]
-name = "decli"
-version = "0.6.1"
-description = "Minimal, easy-to-use, declarative cli tool"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "decli-0.6.1-py3-none-any.whl", hash = "sha256:7815ac58617764e1a200d7cadac6315fcaacc24d727d182f9878dd6378ccf869"},
- {file = "decli-0.6.1.tar.gz", hash = "sha256:ed88ccb947701e8e5509b7945fda56e150e2ac74a69f25d47ac85ef30ab0c0f0"},
-]
-
-[[package]]
-name = "decorator"
-version = "5.1.1"
-description = "Decorators for Humans"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
- {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
-]
-
-[[package]]
-name = "deprecated"
-version = "1.2.14"
-description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
- {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
-]
-
-[package.dependencies]
-wrapt = ">=1.10,<2"
-
-[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
-
[[package]]
name = "docutils"
version = "0.19"
@@ -674,80 +576,96 @@ files = [
]
[[package]]
-name = "exceptiongroup"
-version = "1.1.1"
-description = "Backport of PEP 654 (exception groups)"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
- {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "flake8"
-version = "3.9.2"
-description = "the modular source code checker: pep8 pyflakes and co"
+name = "duckdb"
+version = "0.9.2"
+description = "DuckDB embedded database"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7.0"
files = [
- {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
- {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.7.0,<2.8.0"
-pyflakes = ">=2.3.0,<2.4.0"
-
-[[package]]
-name = "flake8-annotations"
-version = "2.9.1"
-description = "Flake8 Type Annotation Checks"
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"},
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"},
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"},
+ {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"},
+ {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"},
+ {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"},
+ {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"},
+ {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"},
+ {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"},
+ {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"},
+ {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"},
+ {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"},
+ {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"},
+ {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"},
+ {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"},
+ {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"},
+ {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"},
+ {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"},
+ {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"},
+ {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"},
+ {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"},
+ {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"},
+ {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"},
+ {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"},
+ {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"},
+ {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"},
+ {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"},
+ {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"},
+ {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"},
+ {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"},
+]
+
+[[package]]
+name = "duckdb-engine"
+version = "0.9.2"
+description = "SQLAlchemy driver for duckdb"
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.7"
files = [
- {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"},
- {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"},
+ {file = "duckdb_engine-0.9.2-py3-none-any.whl", hash = "sha256:764e83dfb37e2f0ce6afcb8e701299e7b28060a40fdae86cfd7f08e0fca4496a"},
+ {file = "duckdb_engine-0.9.2.tar.gz", hash = "sha256:efcd7b468f9b17e4480a97f0c60eade25cc081e8cfc04c46d63828677964b48f"},
]
[package.dependencies]
-attrs = ">=21.4"
-flake8 = ">=3.7"
-typed-ast = {version = ">=1.4,<2.0", markers = "python_version < \"3.8\""}
+duckdb = ">=0.4.0"
+sqlalchemy = ">=1.3.22"
[[package]]
-name = "flake8-docstrings"
-version = "1.7.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"},
- {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"},
+ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+ {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
]
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
+[package.extras]
+test = ["pytest (>=6)"]
[[package]]
-name = "freezegun"
-version = "1.2.2"
-description = "Let your Python tests travel through time"
+name = "filelock"
+version = "3.12.2"
+description = "A platform independent file lock."
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
- {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+ {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"},
+ {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"},
]
-[package.dependencies]
-python-dateutil = ">=2.7"
+[package.extras]
+docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
[[package]]
name = "fs"
@@ -803,75 +721,72 @@ sphinx-basic-ng = "*"
[[package]]
name = "greenlet"
-version = "2.0.2"
+version = "3.0.1"
description = "Lightweight in-process concurrent programming"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
-files = [
- {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
- {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
- {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
- {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
- {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
- {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
- {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
- {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
- {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
- {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
- {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
- {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
- {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
- {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
- {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
- {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
- {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
- {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
- {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
- {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
- {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
- {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
- {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
- {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
- {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
- {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
- {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
- {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
- {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
- {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
- {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
- {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
- {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
- {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
- {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
- {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
- {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
- {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
+python-versions = ">=3.7"
+files = [
+ {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"},
+ {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"},
+ {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"},
+ {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"},
+ {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"},
+ {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"},
+ {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"},
+ {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"},
+ {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"},
+ {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"},
+ {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"},
+ {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"},
+ {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"},
+ {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"},
+ {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"},
+ {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"},
+ {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"},
+ {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"},
+ {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"},
+ {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"},
+ {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"},
+ {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"},
+ {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"},
+ {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"},
+ {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"},
+ {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"},
+ {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"},
]
[package.extras]
-docs = ["Sphinx", "docutils (<0.18)"]
+docs = ["Sphinx"]
test = ["objgraph", "psutil"]
[[package]]
@@ -898,13 +813,13 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "4.13.0"
+version = "6.7.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
- {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
+ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
+ {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
]
[package.dependencies]
@@ -912,9 +827,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
[[package]]
name = "importlib-resources"
@@ -986,31 +901,28 @@ files = [
[[package]]
name = "joblib"
-version = "1.3.1"
+version = "1.3.2"
description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.7"
files = [
- {file = "joblib-1.3.1-py3-none-any.whl", hash = "sha256:89cf0529520e01b3de7ac7b74a8102c90d16d54c64b5dd98cafcd14307fdf915"},
- {file = "joblib-1.3.1.tar.gz", hash = "sha256:1f937906df65329ba98013dc9692fe22a4c5e4a648112de500508b18a21b41e3"},
+ {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
+ {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"},
]
[[package]]
name = "jsonpath-ng"
-version = "1.5.3"
+version = "1.6.0"
description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming."
optional = false
python-versions = "*"
files = [
- {file = "jsonpath-ng-1.5.3.tar.gz", hash = "sha256:a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567"},
- {file = "jsonpath_ng-1.5.3-py2-none-any.whl", hash = "sha256:f75b95dbecb8a0f3b86fd2ead21c2b022c3f5770957492b9b6196ecccfeb10aa"},
- {file = "jsonpath_ng-1.5.3-py3-none-any.whl", hash = "sha256:292a93569d74029ba75ac2dc3d3630fc0e17b2df26119a165fa1d498ca47bf65"},
+ {file = "jsonpath-ng-1.6.0.tar.gz", hash = "sha256:5483f8e9d74c39c9abfab554c070ae783c1c8cbadf5df60d561bc705ac68a07e"},
+ {file = "jsonpath_ng-1.6.0-py3-none-any.whl", hash = "sha256:6fd04833412c4b3d9299edf369542f5e67095ca84efa17cbb7f06a34958adc9f"},
]
[package.dependencies]
-decorator = "*"
ply = "*"
-six = "*"
[[package]]
name = "jsonschema"
@@ -1054,7 +966,7 @@ tornado = {version = "*", markers = "python_version > \"2.7\""}
name = "markdown-it-py"
version = "2.2.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
-optional = true
+optional = false
python-versions = ">=3.7"
files = [
{file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
@@ -1102,6 +1014,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1134,17 +1056,6 @@ files = [
{file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
]
-[[package]]
-name = "mccabe"
-version = "0.6.1"
-description = "McCabe checker, plugin for flake8"
-optional = false
-python-versions = "*"
-files = [
- {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
- {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
-]
-
[[package]]
name = "mdit-py-plugins"
version = "0.3.5"
@@ -1168,7 +1079,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
-optional = true
+optional = false
python-versions = ">=3.7"
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
@@ -1312,61 +1223,50 @@ files = [
[[package]]
name = "numpy"
-version = "1.24.3"
+version = "1.24.4"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"},
- {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"},
- {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"},
- {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"},
- {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"},
- {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"},
- {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"},
- {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"},
- {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"},
- {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"},
- {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"},
- {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"},
- {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"},
- {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"},
- {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"},
- {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"},
- {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"},
- {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"},
- {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"},
- {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"},
- {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"},
- {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"},
- {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"},
- {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"},
- {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"},
+ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"},
+ {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"},
+ {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"},
+ {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"},
+ {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"},
+ {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"},
+ {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"},
+ {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"},
+ {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"},
+ {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"},
+ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
]
[[package]]
name = "packaging"
-version = "23.1"
+version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
- {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
-]
-
-[[package]]
-name = "pathspec"
-version = "0.11.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
- {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
@@ -1414,33 +1314,15 @@ files = [
{file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
]
-[[package]]
-name = "platformdirs"
-version = "3.6.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "platformdirs-3.6.0-py3-none-any.whl", hash = "sha256:ffa199e3fbab8365778c4a10e1fbf1b9cd50707de826eb304b50e57ec0cc8d38"},
- {file = "platformdirs-3.6.0.tar.gz", hash = "sha256:57e28820ca8094678b807ff529196506d7a21e17156cb1cddb3e74cebce54640"},
-]
-
-[package.dependencies]
-typing-extensions = {version = ">=4.6.3", markers = "python_version < \"3.8\""}
-
-[package.extras]
-docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"]
-
[[package]]
name = "pluggy"
-version = "1.0.0"
+version = "1.2.0"
description = "plugin and hook calling mechanisms for python"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
+ {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
]
[package.dependencies]
@@ -1462,19 +1344,16 @@ files = [
]
[[package]]
-name = "prompt-toolkit"
-version = "3.0.38"
-description = "Library for building powerful interactive command lines in Python"
+name = "py-cpuinfo"
+version = "9.0.0"
+description = "Get CPU info with pure Python"
optional = false
-python-versions = ">=3.7.0"
+python-versions = "*"
files = [
- {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"},
- {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"},
+ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
+ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
]
-[package.dependencies]
-wcwidth = "*"
-
[[package]]
name = "pyarrow"
version = "12.0.1"
@@ -1513,16 +1392,53 @@ files = [
numpy = ">=1.16.6"
[[package]]
-name = "pycodestyle"
-version = "2.7.0"
-description = "Python style guide checker"
+name = "pyarrow"
+version = "14.0.1"
+description = "Python library for Apache Arrow"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.8"
files = [
- {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
- {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
+ {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"},
+ {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"},
+ {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"},
+ {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"},
+ {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"},
+ {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"},
+ {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"},
+ {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"},
+ {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"},
+ {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"},
+ {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"},
+ {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"},
+ {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"},
+ {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"},
+ {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"},
+ {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"},
+ {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"},
+ {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"},
+ {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"},
+ {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"},
+ {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"},
+ {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"},
+ {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"},
+ {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"},
+ {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"},
+ {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"},
+ {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"},
+ {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"},
+ {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"},
+ {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"},
+ {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"},
+ {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"},
+ {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"},
+ {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"},
+ {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"},
+ {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"},
]
+[package.dependencies]
+numpy = ">=1.16.6"
+
[[package]]
name = "pycparser"
version = "2.21"
@@ -1534,65 +1450,20 @@ files = [
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
- {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=2.0.0,<5.0.0", markers = "python_version < \"3.8\""}
-snowballstemmer = ">=2.2.0"
-
-[package.extras]
-toml = ["tomli (>=1.2.3)"]
-
-[[package]]
-name = "pyflakes"
-version = "2.3.1"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
- {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
-]
-
-[[package]]
-name = "pygithub"
-version = "1.58.2"
-description = "Use the full Github API v3"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "PyGithub-1.58.2-py3-none-any.whl", hash = "sha256:f435884af617c6debaa76cbc355372d1027445a56fbc39972a3b9ed4968badc8"},
- {file = "PyGithub-1.58.2.tar.gz", hash = "sha256:1e6b1b7afe31f75151fb81f7ab6b984a7188a852bdb123dbb9ae90023c3ce60f"},
-]
-
-[package.dependencies]
-deprecated = "*"
-pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
-pynacl = ">=1.4.0"
-requests = ">=2.14.0"
-
[[package]]
name = "pygments"
-version = "2.15.1"
+version = "2.17.2"
description = "Pygments is a syntax highlighting package written in Python."
-optional = true
+optional = false
python-versions = ">=3.7"
files = [
- {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
- {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
]
[package.extras]
plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyjwt"
@@ -1606,7 +1477,6 @@ files = [
]
[package.dependencies]
-cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
typing-extensions = {version = "*", markers = "python_version <= \"3.7\""}
[package.extras]
@@ -1615,32 +1485,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
-[[package]]
-name = "pynacl"
-version = "1.5.0"
-description = "Python binding to the Networking and Cryptography (NaCl) library"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
- {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
- {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
- {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
- {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
- {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
-]
-
-[package.dependencies]
-cffi = ">=1.4.1"
-
-[package.extras]
-docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
-tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
-
[[package]]
name = "pyrsistent"
version = "0.19.3"
@@ -1679,13 +1523,13 @@ files = [
[[package]]
name = "pytest"
-version = "7.4.0"
+version = "7.4.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
- {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
+ {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
+ {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
]
[package.dependencies]
@@ -1700,6 +1544,48 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+[[package]]
+name = "pytest-benchmark"
+version = "4.0.0"
+description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"},
+ {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"},
+]
+
+[package.dependencies]
+py-cpuinfo = "*"
+pytest = ">=3.8"
+
+[package.extras]
+aspect = ["aspectlib"]
+elasticsearch = ["elasticsearch"]
+histogram = ["pygal", "pygaljs"]
+
+[[package]]
+name = "pytest-codspeed"
+version = "2.2.0"
+description = "Pytest plugin to create CodSpeed benchmarks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest_codspeed-2.2.0-py3-none-any.whl", hash = "sha256:5da48b842fc465926d122dd15bb86e86af5d9f0c53ec1b7c736e9a9aed558c13"},
+ {file = "pytest_codspeed-2.2.0.tar.gz", hash = "sha256:665003fc20117b64a98d16ffd1008f5bd6bf3b1e9af142b98c00abff7f626bbd"},
+]
+
+[package.dependencies]
+cffi = ">=1.15.1,<1.16.0"
+filelock = ">=3.12.2,<3.13.0"
+pytest = ">=3.8"
+setuptools = {version = ">=67.8.0,<67.9.0", markers = "python_full_version >= \"3.12.0b1\""}
+
+[package.extras]
+compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"]
+lint = ["black (>=23.3.0,<23.4.0)", "isort (>=5.12.0,<5.13.0)", "mypy (>=1.3.0,<1.4.0)", "ruff (>=0.0.275,<0.1.0)"]
+test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"]
+
[[package]]
name = "pytest-durations"
version = "1.2.0"
@@ -1775,13 +1661,13 @@ unidecode = ["Unidecode (>=1.1.1)"]
[[package]]
name = "pytz"
-version = "2023.3"
+version = "2023.3.post1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
- {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
+ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
]
[[package]]
@@ -1807,6 +1693,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1814,8 +1701,15 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1832,6 +1726,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1839,28 +1734,12 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
-[[package]]
-name = "questionary"
-version = "1.10.0"
-description = "Python library to build pretty command line user prompts ⭐️"
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"},
- {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"},
-]
-
-[package.dependencies]
-prompt_toolkit = ">=2.0,<4.0"
-
-[package.extras]
-docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphinx-autodoc-typehints (>=1.11.1,<2.0.0)", "sphinx-copybutton (>=0.3.1,<0.4.0)", "sphinx-rtd-theme (>=0.5.0,<0.6.0)"]
-
[[package]]
name = "requests"
version = "2.31.0"
@@ -1901,15 +1780,34 @@ six = "*"
fixture = ["fixtures"]
test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
+[[package]]
+name = "rich"
+version = "13.7.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"},
+ {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
[[package]]
name = "s3transfer"
-version = "0.6.1"
+version = "0.7.0"
description = "An Amazon S3 Transfer Manager"
optional = true
python-versions = ">= 3.7"
files = [
- {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"},
- {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"},
+ {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"},
+ {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"},
]
[package.dependencies]
@@ -1920,13 +1818,13 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "setuptools"
-version = "68.0.0"
+version = "67.8.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"},
- {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"},
+ {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"},
+ {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"},
]
[package.extras]
@@ -1934,98 +1832,122 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+[[package]]
+name = "simpleeval"
+version = "0.9.13"
+description = "A simple, safe single expression evaluator library."
+optional = false
+python-versions = "*"
+files = [
+ {file = "simpleeval-0.9.13-py2.py3-none-any.whl", hash = "sha256:22a2701a5006e4188d125d34accf2405c2c37c93f6b346f2484b6422415ae54a"},
+ {file = "simpleeval-0.9.13.tar.gz", hash = "sha256:4a30f9cc01825fe4c719c785e3762623e350c4840d5e6855c2a8496baaa65fac"},
+]
+
[[package]]
name = "simplejson"
-version = "3.19.1"
+version = "3.19.2"
description = "Simple, fast, extensible JSON encoder/decoder for Python"
optional = false
python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
- {file = "simplejson-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:412e58997a30c5deb8cab5858b8e2e5b40ca007079f7010ee74565cc13d19665"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e765b1f47293dedf77946f0427e03ee45def2862edacd8868c6cf9ab97c8afbd"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3231100edee292da78948fa0a77dee4e5a94a0a60bcba9ed7a9dc77f4d4bb11e"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:081ea6305b3b5e84ae7417e7f45956db5ea3872ec497a584ec86c3260cda049e"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f253edf694ce836631b350d758d00a8c4011243d58318fbfbe0dd54a6a839ab4"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:5db86bb82034e055257c8e45228ca3dbce85e38d7bfa84fa7b2838e032a3219c"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:69a8b10a4f81548bc1e06ded0c4a6c9042c0be0d947c53c1ed89703f7e613950"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:58ee5e24d6863b22194020eb62673cf8cc69945fcad6b283919490f6e359f7c5"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:73d0904c2471f317386d4ae5c665b16b5c50ab4f3ee7fd3d3b7651e564ad74b1"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:66d780047c31ff316ee305c3f7550f352d87257c756413632303fc59fef19eac"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd4d50a27b065447c9c399f0bf0a993bd0e6308db8bbbfbc3ea03b41c145775a"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c16ec6a67a5f66ab004190829eeede01c633936375edcad7cbf06d3241e5865"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a963e8dd4d81061cc05b627677c1f6a12e81345111fbdc5708c9f088d752c9"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e78d79b10aa92f40f54178ada2b635c960d24fc6141856b926d82f67e56d169"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad071cd84a636195f35fa71de2186d717db775f94f985232775794d09f8d9061"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e7c70f19405e5f99168077b785fe15fcb5f9b3c0b70b0b5c2757ce294922c8c"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54fca2b26bcd1c403146fd9461d1da76199442297160721b1d63def2a1b17799"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:48600a6e0032bed17c20319d91775f1797d39953ccfd68c27f83c8d7fc3b32cb"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:93f5ac30607157a0b2579af59a065bcfaa7fadeb4875bf927a8f8b6739c8d910"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b79642a599740603ca86cf9df54f57a2013c47e1dd4dd2ae4769af0a6816900"},
- {file = "simplejson-3.19.1-cp310-cp310-win32.whl", hash = "sha256:d9f2c27f18a0b94107d57294aab3d06d6046ea843ed4a45cae8bd45756749f3a"},
- {file = "simplejson-3.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:5673d27806085d2a413b3be5f85fad6fca4b7ffd31cfe510bbe65eea52fff571"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:79c748aa61fd8098d0472e776743de20fae2686edb80a24f0f6593a77f74fe86"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:390f4a8ca61d90bcf806c3ad644e05fa5890f5b9a72abdd4ca8430cdc1e386fa"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d61482b5d18181e6bb4810b4a6a24c63a490c3a20e9fbd7876639653e2b30a1a"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2541fdb7467ef9bfad1f55b6c52e8ea52b3ce4a0027d37aff094190a955daa9d"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46133bc7dd45c9953e6ee4852e3de3d5a9a4a03b068bd238935a5c72f0a1ce34"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f96def94576f857abf58e031ce881b5a3fc25cbec64b2bc4824824a8a4367af9"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f14ecca970d825df0d29d5c6736ff27999ee7bdf5510e807f7ad8845f7760ce"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:66389b6b6ee46a94a493a933a26008a1bae0cfadeca176933e7ff6556c0ce998"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:22b867205cd258050c2625325fdd9a65f917a5aff22a23387e245ecae4098e78"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c39fa911e4302eb79c804b221ddec775c3da08833c0a9120041dd322789824de"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65dafe413b15e8895ad42e49210b74a955c9ae65564952b0243a18fb35b986cc"},
- {file = "simplejson-3.19.1-cp311-cp311-win32.whl", hash = "sha256:f05d05d99fce5537d8f7a0af6417a9afa9af3a6c4bb1ba7359c53b6257625fcb"},
- {file = "simplejson-3.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:b46aaf0332a8a9c965310058cf3487d705bf672641d2c43a835625b326689cf4"},
- {file = "simplejson-3.19.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b438e5eaa474365f4faaeeef1ec3e8d5b4e7030706e3e3d6b5bee6049732e0e6"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9d614a612ad02492f704fbac636f666fa89295a5d22b4facf2d665fc3b5ea9"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46e89f58e4bed107626edce1cf098da3664a336d01fc78fddcfb1f397f553d44"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96ade243fb6f3b57e7bd3b71e90c190cd0f93ec5dce6bf38734a73a2e5fa274f"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed18728b90758d171f0c66c475c24a443ede815cf3f1a91e907b0db0ebc6e508"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6a561320485017ddfc21bd2ed5de2d70184f754f1c9b1947c55f8e2b0163a268"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2098811cd241429c08b7fc5c9e41fcc3f59f27c2e8d1da2ccdcf6c8e340ab507"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8f8d179393e6f0cf6c7c950576892ea6acbcea0a320838c61968ac7046f59228"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:eff87c68058374e45225089e4538c26329a13499bc0104b52b77f8428eed36b2"},
- {file = "simplejson-3.19.1-cp36-cp36m-win32.whl", hash = "sha256:d300773b93eed82f6da138fd1d081dc96fbe53d96000a85e41460fe07c8d8b33"},
- {file = "simplejson-3.19.1-cp36-cp36m-win_amd64.whl", hash = "sha256:37724c634f93e5caaca04458f267836eb9505d897ab3947b52f33b191bf344f3"},
- {file = "simplejson-3.19.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74bf802debe68627227ddb665c067eb8c73aa68b2476369237adf55c1161b728"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70128fb92932524c89f373e17221cf9535d7d0c63794955cc3cd5868e19f5d38"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8090e75653ea7db75bc21fa5f7bcf5f7bdf64ea258cbbac45c7065f6324f1b50"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a755f7bfc8adcb94887710dc70cc12a69a454120c6adcc6f251c3f7b46ee6aac"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ccb2c1877bc9b25bc4f4687169caa925ffda605d7569c40e8e95186e9a5e58b"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:919bc5aa4d8094cf8f1371ea9119e5d952f741dc4162810ab714aec948a23fe5"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e333c5b62e93949f5ac27e6758ba53ef6ee4f93e36cc977fe2e3df85c02f6dc4"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3a4480e348000d89cf501b5606415f4d328484bbb431146c2971123d49fd8430"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cb502cde018e93e75dc8fc7bb2d93477ce4f3ac10369f48866c61b5e031db1fd"},
- {file = "simplejson-3.19.1-cp37-cp37m-win32.whl", hash = "sha256:f41915a4e1f059dfad614b187bc06021fefb5fc5255bfe63abf8247d2f7a646a"},
- {file = "simplejson-3.19.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3844305bc33d52c4975da07f75b480e17af3558c0d13085eaa6cc2f32882ccf7"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1cb19eacb77adc5a9720244d8d0b5507421d117c7ed4f2f9461424a1829e0ceb"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:926957b278de22797bfc2f004b15297013843b595b3cd7ecd9e37ccb5fad0b72"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b0e9a5e66969f7a47dc500e3dba8edc3b45d4eb31efb855c8647700a3493dd8a"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79d46e7e33c3a4ef853a1307b2032cfb7220e1a079d0c65488fbd7118f44935a"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344a5093b71c1b370968d0fbd14d55c9413cb6f0355fdefeb4a322d602d21776"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23fbb7b46d44ed7cbcda689295862851105c7594ae5875dce2a70eeaa498ff86"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3025e7e9ddb48813aec2974e1a7e68e63eac911dd5e0a9568775de107ac79a"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:87b190e6ceec286219bd6b6f13547ca433f977d4600b4e81739e9ac23b5b9ba9"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc935d8322ba9bc7b84f99f40f111809b0473df167bf5b93b89fb719d2c4892b"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3b652579c21af73879d99c8072c31476788c8c26b5565687fd9db154070d852a"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6aa7ca03f25b23b01629b1c7f78e1cd826a66bfb8809f8977a3635be2ec48f1a"},
- {file = "simplejson-3.19.1-cp38-cp38-win32.whl", hash = "sha256:08be5a241fdf67a8e05ac7edbd49b07b638ebe4846b560673e196b2a25c94b92"},
- {file = "simplejson-3.19.1-cp38-cp38-win_amd64.whl", hash = "sha256:ca56a6c8c8236d6fe19abb67ef08d76f3c3f46712c49a3b6a5352b6e43e8855f"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6424d8229ba62e5dbbc377908cfee9b2edf25abd63b855c21f12ac596cd18e41"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:547ea86ca408a6735335c881a2e6208851027f5bfd678d8f2c92a0f02c7e7330"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:889328873c35cb0b2b4c83cbb83ec52efee5a05e75002e2c0c46c4e42790e83c"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cdb4e544134f305b033ad79ae5c6b9a32e7c58b46d9f55a64e2a883fbbba01"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2b3f06430cbd4fac0dae5b2974d2bf14f71b415fb6de017f498950da8159b1"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d125e754d26c0298715bdc3f8a03a0658ecbe72330be247f4b328d229d8cf67f"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:476c8033abed7b1fd8db62a7600bf18501ce701c1a71179e4ce04ac92c1c5c3c"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:199a0bcd792811c252d71e3eabb3d4a132b3e85e43ebd93bfd053d5b59a7e78b"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a79b439a6a77649bb8e2f2644e6c9cc0adb720fc55bed63546edea86e1d5c6c8"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:203412745fed916fc04566ecef3f2b6c872b52f1e7fb3a6a84451b800fb508c1"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca922c61d87b4c38f37aa706520328ffe22d7ac1553ef1cadc73f053a673553"},
- {file = "simplejson-3.19.1-cp39-cp39-win32.whl", hash = "sha256:3e0902c278243d6f7223ba3e6c5738614c971fd9a887fff8feaa8dcf7249c8d4"},
- {file = "simplejson-3.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:d396b610e77b0c438846607cd56418bfc194973b9886550a98fd6724e8c6cfec"},
- {file = "simplejson-3.19.1-py3-none-any.whl", hash = "sha256:4710806eb75e87919b858af0cba4ffedc01b463edc3982ded7b55143f39e41e1"},
- {file = "simplejson-3.19.1.tar.gz", hash = "sha256:6277f60848a7d8319d27d2be767a7546bc965535b28070e310b3a9af90604a4c"},
+ {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"},
+ {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"},
+ {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"},
+ {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"},
+ {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"},
+ {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"},
+ {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"},
+ {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"},
+ {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"},
+ {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"},
+ {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"},
+ {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"},
+ {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"},
+ {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"},
]
[[package]]
@@ -2043,7 +1965,7 @@ files = [
name = "snowballstemmer"
version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
@@ -2117,13 +2039,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "sphinx-basic-ng"
-version = "1.0.0b1"
+version = "1.0.0b2"
description = "A modern skeleton for Sphinx themes."
optional = true
python-versions = ">=3.7"
files = [
- {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"},
- {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"},
+ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"},
+ {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"},
]
[package.dependencies]
@@ -2168,15 +2090,33 @@ sphinx = ">=3"
doc = ["furo", "myst-parser"]
test = ["pytest", "pytest-cov", "pytest-xdist"]
+[[package]]
+name = "sphinx-notfound-page"
+version = "1.0.0"
+description = "Sphinx extension to build a 404 page with absolute URLs"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "sphinx_notfound_page-1.0.0-py3-none-any.whl", hash = "sha256:40a5741a6b07245a08fe55dbbd603ad6719e191b1419ab2e5337c706ebd16554"},
+ {file = "sphinx_notfound_page-1.0.0.tar.gz", hash = "sha256:14cd388956de5cdf8710ab4ff31776ef8d85759c4f46014ee30f368e83bd3a3b"},
+]
+
+[package.dependencies]
+sphinx = ">=5"
+
+[package.extras]
+doc = ["sphinx-autoapi", "sphinx-rtd-theme", "sphinx-tabs", "sphinxemoji"]
+test = ["tox"]
+
[[package]]
name = "sphinx-reredirects"
-version = "0.1.2"
+version = "0.1.3"
description = "Handles redirects for moved pages in Sphinx documentation projects"
optional = true
python-versions = ">=3.5"
files = [
- {file = "sphinx_reredirects-0.1.2-py3-none-any.whl", hash = "sha256:3a22161771aadd448bb608a4fe7277252182a337af53c18372b7104531d71489"},
- {file = "sphinx_reredirects-0.1.2.tar.gz", hash = "sha256:a0e7213304759b01edc22f032f1715a1c61176fc8f167164e7a52b9feec9ac64"},
+ {file = "sphinx_reredirects-0.1.3-py3-none-any.whl", hash = "sha256:02c53437c467cf9ed89863eff3addedc01d129624b2f03ab6302518fb77a2c89"},
+ {file = "sphinx_reredirects-0.1.3.tar.gz", hash = "sha256:56e222d285f76c944fd370f36ad3a1a66103a88b552e97d3d24a622bb9465de8"},
]
[package.dependencies]
@@ -2273,107 +2213,92 @@ test = ["pytest"]
[[package]]
name = "sqlalchemy"
-version = "1.4.48"
+version = "2.0.23"
description = "Database Abstraction Library"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = "sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = "sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"},
- {file = "SQLAlchemy-1.4.48.tar.gz", hash = "sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"},
+python-versions = ">=3.7"
+files = [
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"},
+ {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"},
+ {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"},
+ {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"},
+ {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"},
+ {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"},
+ {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"},
+ {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"},
+ {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"},
]
[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"}
+greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+typing-extensions = ">=4.2.0"
[package.extras]
-aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
-mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
+oracle = ["cx-oracle (>=8)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
-pymysql = ["pymysql", "pymysql (<1)"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]
-[[package]]
-name = "sqlalchemy2-stubs"
-version = "0.0.2a35"
-description = "Typing Stubs for SQLAlchemy 1.4"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "sqlalchemy2-stubs-0.0.2a35.tar.gz", hash = "sha256:bd5d530697d7e8c8504c7fe792ef334538392a5fb7aa7e4f670bfacdd668a19d"},
- {file = "sqlalchemy2_stubs-0.0.2a35-py3-none-any.whl", hash = "sha256:593784ff9fc0dc2ded1895e3322591689db3be06f3ca006e3ef47640baf2d38a"},
-]
-
-[package.dependencies]
-typing-extensions = ">=3.7.4"
-
-[[package]]
-name = "termcolor"
-version = "2.3.0"
-description = "ANSI color formatting for output in terminal"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"},
- {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"},
-]
-
-[package.extras]
-tests = ["pytest", "pytest-cov"]
-
[[package]]
name = "text-unidecode"
version = "1.3"
@@ -2386,25 +2311,80 @@ files = [
]
[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
+name = "time-machine"
+version = "2.10.0"
+description = "Travel through time in your tests."
optional = false
python-versions = ">=3.7"
files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+ {file = "time_machine-2.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d5e93c14b935d802a310c1d4694a9fe894b48a733ebd641c9a570d6f9e1f667"},
+ {file = "time_machine-2.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c0dda6b132c0180941944ede357109016d161d840384c2fb1096a3a2ef619f4"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:900517e4a4121bf88527343d6aea2b5c99df134815bb8271ef589ec792502a71"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:860279c7f9413bc763b3d1aee622937c4538472e2e58ad668546b49a797cb9fb"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f451be286d50ec9b685198c7f76cea46538b8c57ec816f60edf5eb68d71c4f4"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1b07f5da833b2d8ea170cdf15a322c6fa2c6f7e9097a1bea435adc597cdcb5d"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b3a529ecc819488783e371df5ad315e790b9558c6945a236b13d7cb9ab73b9a"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51e36491bd4a43f8a937ca7c0d1a2287b8998f41306f47ebed250a02f93d2fe4"},
+ {file = "time_machine-2.10.0-cp310-cp310-win32.whl", hash = "sha256:1e9973091ad3272c719dafae35a5bb08fa5433c2902224d0f745657f9e3ac327"},
+ {file = "time_machine-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab82ea5a59faa1faa7397465f2edd94789a13f543daa02d16244906339100080"},
+ {file = "time_machine-2.10.0-cp310-cp310-win_arm64.whl", hash = "sha256:55bc6d666966fa2e6283d7433ebe875be37684a847eaa802075433c1ab3a377a"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99fc366cb4fa26d81f12fa36a929db0da89d99909e28231c045e0f1277e0db84"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5969f325c20bdcb7f8917a6ac2ef328ec41cc2e256320a99dfe38b4080eeae71"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:a1a5e283ab47b28205f33fa3c5a2df3fd9f07f09add63dbe76637c3633893a23"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4083ec185ab9ece3e5a7ca7a7589114a555f04bcff31b29d4eb47a37e87d97fe"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbe45f88399b8af299136435a2363764d5fa6d16a936e4505081b6ea32ff3e18"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d149a3fae8a06a3593361496ec036a27906fed478ade23ffc01dd402acd0b37"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e05306f63df3c7760170af6e77e1b37405b7c7c4a97cc9fdf0105f1094b1b1c"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d6d7b7680e34dbe60da34d75d6d5f31b6206c7149c0de8a7b0f0311d0ef7e3a"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:91b8b06e09e1dfd53dafe272d41b60690d6f8806d7194c62982b003a088dc423"},
+ {file = "time_machine-2.10.0-cp311-cp311-win32.whl", hash = "sha256:6241a1742657622ebdcd66cf6045c92e0ec6ca6365c55434cc7fea945008192c"},
+ {file = "time_machine-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:48cce6dcb7118ba4a58537c6de4d1dd6e7ad6ea15d0257d6e0003b45c4a839c2"},
+ {file = "time_machine-2.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:8cb6285095efa0833fd0301e159748a06e950c7744dc3d38e92e7607e2232d5a"},
+ {file = "time_machine-2.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8829ca7ed939419c2a23c360101edc51e3b57f40708d304b6aed16214d8b2a1f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b5b60bc00ad2efa5fefee117e5611a28b26f563f1a64df118d1d2f2590a679a"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1491fb647568134d38b06e844783d3069f5811405e9a3906eff88d55403e327"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78f2759a63fcc7660d283e22054c7cfa7468fad1ad86d0846819b6ea958d63f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:30881f263332245a665a49d0e30fda135597c4e18f2efa9c6759c224419c36a5"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e93750309093275340e0e95bb270801ec9cbf2ee8702d71031f4ccd8cc91dd7f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a906bb338a6be978b83f09f09d8b24737239330f280c890ecbf1c13828e1838c"},
+ {file = "time_machine-2.10.0-cp37-cp37m-win32.whl", hash = "sha256:10c8b170920d3f83dad2268ae8d5e1d8bb431a85198e32d778e6f3a1f93b172d"},
+ {file = "time_machine-2.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5efc4cc914d93138944c488fdebd6e4290273e3ac795d5c7a744af29eb04ce0f"},
+ {file = "time_machine-2.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1787887168e36f57d5ca1abf1b9d065a55eb67067df2fa23aaa4382da36f7098"},
+ {file = "time_machine-2.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26a8cc1f8e9f4f69ea3f50b9b9e3a699e80e44ac9359a867208be6adac30fc60"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07e2c6c299c5509c72cc221a19f4bf680c87c793727a3127a29e18ddad3db13"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f3e5f263a623148a448756a332aad45e65a59876fcb2511f7f61213e6d3ec3e"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3abcb48d7ca7ed95e5d99220317b7ce31378636bb020cabfa62f9099e7dad"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:545a813b7407c33dee388aa380449e79f57f02613ea149c6e907fc9ca3d53e64"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:458b52673ec83d10da279d989d7a6ad1e60c93e4ba986210d72e6c78e17102f4"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:acb2ca50d779d39eab1d0fab48697359e4ffc1aedfa58b79cd3a86ee13253834"},
+ {file = "time_machine-2.10.0-cp38-cp38-win32.whl", hash = "sha256:648fec54917a7e67acca38ed8e736b206e8a9688730e13e1cf7a74bcce89dec7"},
+ {file = "time_machine-2.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3ed92d2a6e2c2b7a0c8161ecca5d012041b7ba147cbdfb2b7f62f45c02615111"},
+ {file = "time_machine-2.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6d2588581d3071d556f96954d084b7b99701e54120bb29dfadaab04791ef6ae4"},
+ {file = "time_machine-2.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:185f7a4228e993ddae610e24fb3c7e7891130ebb6a40f42d58ea3be0bfafe1b1"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8225eb813ea9488de99e61569fc1b2d148d236473a84c6758cc436ffef4c043"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f03ac22440b00abd1027bfb7dd793dfeffb72dda26f336f4d561835e0ce6117"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4252f4daef831556e6685853d7a61b02910d0465528c549f179ea4e36aaeb14c"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:58c65bf4775fca62e1678cb234f1ca90254e811d978971c819d2cd24e1b7f136"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8527ac8fca7b92556c3c4c0f08e0bea995202db4be5b7d95b9b2ccbcb63649f2"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4684308d749fdb0c22af173b081206d2a5a85d2154a683a7f4a60c4b667f7a65"},
+ {file = "time_machine-2.10.0-cp39-cp39-win32.whl", hash = "sha256:2adc24cf25b7e8d08aea2b109cc42c5db76817b07ee709fae5c66afa4ec7bc6e"},
+ {file = "time_machine-2.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:36f5be6f3042734fca043bedafbfbb6ad4809352e40b3283cb46b151a823674c"},
+ {file = "time_machine-2.10.0-cp39-cp39-win_arm64.whl", hash = "sha256:c1775a949dd830579d1af5a271ec53d920dc01657035ad305f55c5a1ac9b9f1e"},
+ {file = "time_machine-2.10.0.tar.gz", hash = "sha256:64fd89678cf589fc5554c311417128b2782222dd65f703bf248ef41541761da0"},
]
+[package.dependencies]
+python-dateutil = "*"
+
[[package]]
-name = "tomlkit"
-version = "0.11.8"
-description = "Style preserving TOML library"
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"},
- {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"},
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
@@ -2429,90 +2409,107 @@ files = [
[[package]]
name = "typed-ast"
-version = "1.5.4"
+version = "1.5.5"
description = "a fork of Python 2 and 3 ast modules with type comment support"
optional = false
python-versions = ">=3.6"
files = [
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
- {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
- {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
- {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
- {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
- {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
- {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
- {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
- {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"},
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"},
+ {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"},
+ {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"},
+ {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"},
]
[[package]]
name = "types-jsonschema"
-version = "4.17.0.9"
+version = "4.17.0.10"
description = "Typing stubs for jsonschema"
optional = false
python-versions = "*"
files = [
- {file = "types-jsonschema-4.17.0.9.tar.gz", hash = "sha256:ddbbf84a37ba19f486e43d2a4ab239c9e49aebb5cc99a17a5d59f54568373376"},
- {file = "types_jsonschema-4.17.0.9-py3-none-any.whl", hash = "sha256:ec83f48c5ce5d3ea6955c3617d8c903e5ba3db8debea0c7f5c8e9bd60d782a9e"},
+ {file = "types-jsonschema-4.17.0.10.tar.gz", hash = "sha256:8e979db34d69bc9f9b3d6e8b89bdbc60b3a41cfce4e1fb87bf191d205c7f5098"},
+ {file = "types_jsonschema-4.17.0.10-py3-none-any.whl", hash = "sha256:3aa2a89afbd9eaa6ce0c15618b36f02692a621433889ce73014656f7d8caf971"},
]
[[package]]
name = "types-python-dateutil"
-version = "2.8.19.13"
+version = "2.8.19.14"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = "*"
files = [
- {file = "types-python-dateutil-2.8.19.13.tar.gz", hash = "sha256:09a0275f95ee31ce68196710ed2c3d1b9dc42e0b61cc43acc369a42cb939134f"},
- {file = "types_python_dateutil-2.8.19.13-py3-none-any.whl", hash = "sha256:0b0e7c68e7043b0354b26a1e0225cb1baea7abb1b324d02b50e2d08f1221043f"},
+ {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"},
+ {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"},
]
[[package]]
name = "types-pytz"
-version = "2023.3.0.0"
+version = "2023.3.1.1"
description = "Typing stubs for pytz"
optional = false
python-versions = "*"
files = [
- {file = "types-pytz-2023.3.0.0.tar.gz", hash = "sha256:ecdc70d543aaf3616a7e48631543a884f74205f284cefd6649ddf44c6a820aac"},
- {file = "types_pytz-2023.3.0.0-py3-none-any.whl", hash = "sha256:4fc2a7fbbc315f0b6630e0b899fd6c743705abe1094d007b0e612d10da15e0f3"},
+ {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"},
+ {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"},
]
[[package]]
name = "types-pyyaml"
-version = "6.0.12.10"
+version = "6.0.12.12"
description = "Typing stubs for PyYAML"
optional = false
python-versions = "*"
files = [
- {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"},
- {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"},
+ {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"},
+ {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"},
]
[[package]]
name = "types-requests"
-version = "2.31.0.1"
+version = "2.31.0.6"
description = "Typing stubs for requests"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"},
- {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"},
+ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"},
+ {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"},
]
[package.dependencies]
@@ -2520,24 +2517,24 @@ types-urllib3 = "*"
[[package]]
name = "types-simplejson"
-version = "3.19.0.1"
+version = "3.19.0.2"
description = "Typing stubs for simplejson"
optional = false
python-versions = "*"
files = [
- {file = "types-simplejson-3.19.0.1.tar.gz", hash = "sha256:0233df016477bd58a2525df79ac8a34b079910d51ca45ec4f09a94ae58222f02"},
- {file = "types_simplejson-3.19.0.1-py3-none-any.whl", hash = "sha256:0083e84d43b6b36e8af6eb77e6b41440f2aec8842d16cee0f828fb5622196f4f"},
+ {file = "types-simplejson-3.19.0.2.tar.gz", hash = "sha256:ebc81f886f89d99d6b80c726518aa2228bc77c26438f18fd81455e4f79f8ee1b"},
+ {file = "types_simplejson-3.19.0.2-py3-none-any.whl", hash = "sha256:8ba093dc7884f59b3e62aed217144085e675a269debc32678fd80e0b43b2b86f"},
]
[[package]]
name = "types-urllib3"
-version = "1.26.25.13"
+version = "1.26.25.14"
description = "Typing stubs for urllib3"
optional = false
python-versions = "*"
files = [
- {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"},
- {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"},
+ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"},
+ {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"},
]
[[package]]
@@ -2553,141 +2550,42 @@ files = [
[[package]]
name = "urllib3"
-version = "1.26.16"
+version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
- {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
-[[package]]
-name = "wcwidth"
-version = "0.2.6"
-description = "Measures the displayed width of unicode strings in a terminal"
-optional = false
-python-versions = "*"
-files = [
- {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
- {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
-]
-
-[[package]]
-name = "wrapt"
-version = "1.15.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-files = [
- {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
- {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
- {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
- {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
- {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
- {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
- {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
- {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
- {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
- {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
- {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
- {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
- {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
- {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
- {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
- {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
- {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
- {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
- {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
-]
-
[[package]]
name = "xdoctest"
-version = "1.1.1"
+version = "1.1.2"
description = "A rewrite of the builtin doctest module"
optional = false
python-versions = ">=3.6"
files = [
- {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"},
- {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"},
+ {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"},
+ {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"},
]
-[package.dependencies]
-six = "*"
-
[package.extras]
-all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"]
-all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
+all = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)", "typing (>=3.7.4)"]
+all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
colors = ["Pygments", "Pygments", "colorama"]
-jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"]
-optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"]
-optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
-runtime-strict = ["six (==1.11.0)"]
-tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"]
+jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "nbconvert"]
+optional = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"]
+optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
+tests = ["pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "typing (>=3.7.4)"]
tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"]
tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"]
-tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]
+tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]
[[package]]
name = "zipp"
@@ -2705,11 +2603,11 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[extras]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-reredirects"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinx-reredirects"]
s3 = ["fs-s3fs"]
testing = ["pytest", "pytest-durations"]
[metadata]
lock-version = "2.0"
-python-versions = "<3.12,>=3.7.1"
-content-hash = "ef713a1192d52c92e45d00697c48d51df216f225476d2a5744954302b438dc8e"
+python-versions = ">=3.7.1,<4"
+content-hash = "34ce3dd7949cb103c5402241e56df1316fc215c9f31da078f61be964c100f43a"
diff --git a/pyproject.toml b/pyproject.toml
index bac2a0f7d3..92f11465fb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,9 +1,9 @@
[tool.poetry]
name = "singer-sdk"
-version = "0.30.0"
+version = "0.33.1"
description = "A framework for building Singer taps"
-authors = ["Meltano Team and Contributors"]
-maintainers = ["Meltano Team and Contributors"]
+authors = ["Meltano Team and Contributors <hello@meltano.com>"]
+maintainers = ["Meltano Team and Contributors <hello@meltano.com>"]
readme = "README.md"
homepage = "https://sdk.meltano.com/en/latest/"
repository = "https://github.com/meltano/sdk"
@@ -38,47 +38,50 @@ license = "Apache-2.0"
"Youtube" = "https://www.youtube.com/meltano"
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-backoff = ">=2.0.0,<3.0"
-pendulum = "^2.1.0"
+python = ">=3.7.1,<4"
+backoff = ">=2.0.0"
click = "~=8.0"
-fs = "^2.4.16"
-PyJWT = "~=2.4"
-requests = "^2.25.1"
cryptography = ">=3.4.6,<42.0.0"
-importlib-metadata = {version = "<5.0.0", markers = "python_version < \"3.8\""}
-importlib-resources = {version = "5.12.0", markers = "python_version < \"3.9\""}
+fs = ">=2.4.16"
+importlib-metadata = {version = "<7.0.0", markers = "python_version < \"3.8\""}
+importlib-resources = {version = ">=5.12.0", markers = "python_version < \"3.9\""}
+inflection = ">=0.5.1"
+joblib = ">=1.0.1"
+jsonpath-ng = ">=1.5.3"
+jsonschema = ">=4.16.0"
memoization = ">=0.3.2,<0.5.0"
-jsonpath-ng = "^1.5.3"
-joblib = "^1.0.1"
-inflection = "^0.5.1"
-sqlalchemy = "^1.4"
-python-dotenv = ">=0.20,<0.22"
-typing-extensions = "^4.2.0"
-simplejson = "^3.17.6"
-jsonschema = "^4.16.0"
packaging = ">=23.1"
+pendulum = ">=2.1.0"
+PyJWT = "~=2.4"
+python-dotenv = ">=0.20,<0.22"
pytz = ">=2022.2.1,<2024.0.0"
-PyYAML = "^6.0"
+PyYAML = ">=6.0"
+requests = ">=2.25.1"
+simpleeval = ">=0.9.13"
+simplejson = ">=3.17.6"
+sqlalchemy = ">=1.4,<3.0"
+typing-extensions = ">=4.2.0"
# urllib3 2.0 is not compatible with botocore
urllib3 = ">=1.26,<2"
# Sphinx dependencies installed as optional 'docs' extras
# https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-664002569
-sphinx = {version = ">=4.5,<6.0", optional = true}
-furo = {version = ">=2022.12.7,<2024.0.0", optional = true}
-sphinx-copybutton = {version = ">=0.3.1,<0.6.0", optional = true}
-myst-parser = {version = ">=0.17.2,<1.1.0", optional = true}
-sphinx-autobuild = {version = "^2021.3.14", optional = true}
-sphinx-reredirects = {version = "^0.1.1", optional = true}
+sphinx = {version = ">=4.5", optional = true}
+furo = {version = ">=2022.12.7", optional = true}
+sphinx-copybutton = {version = ">=0.3.1", optional = true}
+myst-parser = {version = ">=1", optional = true}
+sphinx-autobuild = {version = ">=2021.3.14", optional = true}
sphinx-inline-tabs = {version = ">=2023.4.21", optional = true, markers = "python_version >= \"3.8\""}
+sphinx-notfound-page = {version = ">=1.0.0", optional = true, python = ">=3.8"}
+sphinx-reredirects = {version = ">=0.1.1", optional = true}
# File storage dependencies installed as optional 'filesystem' extras
-fs-s3fs = {version = "^1.1.1", optional = true}
+fs-s3fs = {version = ">=1.1.1", optional = true}
# Testing dependencies installed as optional 'testing' extras
-pytest = {version="^7.2.1", optional = true}
-pytest-durations = {version = "^1.2.0", optional = true}
+pytest = {version=">=7.2.1", optional = true}
+pytest-durations = {version = ">=1.2.0", optional = true}
+backports-datetime-fromisoformat = { version = ">=2.0.1", python = "<3.11" }
[tool.poetry.extras]
docs = [
@@ -87,8 +90,9 @@ docs = [
"sphinx-copybutton",
"myst-parser",
"sphinx-autobuild",
- "sphinx-reredirects",
"sphinx-inline-tabs",
+ "sphinx-notfound-page",
+ "sphinx-reredirects",
]
s3 = ["fs-s3fs"]
testing = [
@@ -98,43 +102,39 @@ testing = [
[tool.poetry.group.dev.dependencies]
# snowflake-connector-python = "2.0.4" # Removed: Too many version conflicts!
-commitizen-version-bump = { git = "https://github.com/meltano/commitizen-version-bump.git", branch = "main" }
-xdoctest = "^1.1.1"
-mypy = "^1.0"
-cookiecutter = ">=2.1.1,<2.2.3"
-PyYAML = "^6.0"
-freezegun = "^1.2.2"
+cookiecutter = ">=2.1.1"
+coverage = {extras = ["toml"], version = ">=7.2"}
+duckdb = ">=0.8.0"
+duckdb-engine = ">=0.9.2"
+mypy = ">=1.0"
numpy = [
{ version = "<1.22", python = "<3.8" },
{ version = ">=1.22", python = ">=3.8" },
]
-requests-mock = "^1.10.0"
-sqlalchemy2-stubs = {version = "^0.0.2a32", allow-prereleases = true}
-types-jsonschema = "^4.17.0.6"
-types-python-dateutil = "^2.8.19"
-types-pytz = ">=2022.7.1.2,<2024.0.0.0"
-types-requests = "^2.28.11"
-types-simplejson = "^3.18.0"
-types-PyYAML = "^6.0.12"
-coverage = {extras = ["toml"], version = "^7.2"}
-pyarrow = ">=11,<13"
-pytest-snapshot = "^0.9.0"
+pyarrow = [
+ { version = ">=11", python = "<3.8" },
+ { version = ">=13", python = ">=3.8" }
+]
+pytest-benchmark = ">=4.0.0"
+pytest-snapshot = ">=0.9.0"
+requests-mock = ">=1.10.0"
+time-machine = ">=2.10.0"
+types-jsonschema = ">=4.17.0.6"
+types-python-dateutil = ">=2.8.19"
+types-pytz = ">=2022.7.1.2"
+types-requests = ">=2.28.11"
+types-simplejson = ">=3.18.0"
+types-PyYAML = ">=6.0.12"
+xdoctest = ">=1.1.1"
-# Cookiecutter tests
-black = "^23.1"
-darglint = "^1.8.0"
-flake8 = "^3.9.0"
-flake8-annotations = "^2.9.1"
-flake8-docstrings = "^1.7.0"
+[tool.poetry.group.benchmark.dependencies]
+pytest-codspeed = ">=2.2.0"
[tool.black]
exclude = ".*simpleeval.*"
[tool.pytest.ini_options]
addopts = '-vvv --ignore=singer_sdk/helpers/_simpleeval.py -m "not external"'
-filterwarnings = [
- "error::sqlalchemy.exc.RemovedIn20Warning",
-]
markers = [
"external: Tests relying on external resources",
"windows: Tests that only run on Windows",
@@ -145,13 +145,16 @@ norecursedirs = "cookiecutter"
[tool.commitizen]
name = "cz_version_bump"
-version = "0.30.0"
+version = "0.33.1"
+changelog_merge_prerelease = true
+prerelease_offset = 1
tag_format = "v$major.$minor.$patch$prerelease"
version_files = [
- "docs/conf.py",
+ "docs/conf.py:^release =",
"pyproject.toml:^version =",
"cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml:singer-sdk",
"cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml:singer-sdk",
+ "cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml:singer-sdk",
".github/ISSUE_TEMPLATE/bug.yml:^ placeholder:",
]
@@ -190,9 +193,6 @@ fail_under = 82
[tool.mypy]
exclude = "tests"
files = "singer_sdk"
-plugins = [
- "sqlalchemy.ext.mypy.plugin",
-]
python_version = "3.8"
warn_unused_configs = true
warn_unused_ignores = true
@@ -201,13 +201,9 @@ warn_return_any = true
[[tool.mypy.overrides]]
ignore_missing_imports = true
module = [
- "bcrypt.*",
- "joblib.*",
- "pyarrow.*",
- "pandas.*",
+ "backports.datetime_fromisoformat.*",
+ "joblib.*", # TODO: Remove when https://github.com/joblib/joblib/issues/1516 is shipped
"jsonpath_ng.*",
- "samples.*",
- "sqlalchemy.*",
]
[build-system]
@@ -215,9 +211,14 @@ requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
-pytest11 = { callable = "singer_sdk:testing.pytest_plugin", extras = ["testing"] }
+pytest11 = { reference = "singer_sdk:testing.pytest_plugin", extras = ["testing"], type = "console" }
[tool.ruff]
+line-length = 88
+src = ["samples", "singer_sdk", "tests"]
+target-version = "py37"
+
+[tool.ruff.lint]
exclude = [
"cookiecutter/*",
"singer_sdk/helpers/_simpleeval.py",
@@ -227,8 +228,9 @@ ignore = [
"ANN101", # Missing type annotation for `self` in method
"ANN102", # Missing type annotation for `cls` in class method
"N818", # Exception name should be named with an Error suffix
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
-line-length = 88
select = [
"F", # Pyflakes
"E", # pycodestyle (error)
@@ -249,7 +251,8 @@ select = [
"C4", # flake8-comprehensions
"DTZ", # flake8-datetimezs
"T10", # flake8-debugger
- "EM", # flake8-error-message
+ "EM", # flake8-errmsg
+ "FA", # flake8-future-annotations
"ISC", # flake8-implicit-str-concat
"ICN", # flake8-import-conventions
"G", # flake8-logging-format
@@ -260,6 +263,7 @@ select = [
"Q", # flake8-quotes
"RSE", # flake8-raise
"RET", # flake8-return
+ # "SLF", # flake8-self
"SIM", # flake8-simplify
"TID", # flake8-tidy-imports
"TCH", # flake8-type-checking
@@ -271,12 +275,14 @@ select = [
"PLE", # pylint (error)
"PLR", # pylint (refactor)
"PLW", # pylint (warning)
+ "PERF", # perflint
"RUF", # ruff
]
-src = ["samples", "singer_sdk", "tests"]
-target-version = "py37"
+unfixable = [
+ "ERA", # Don't remove commented out code
+]
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
"docs/conf.py" = [
"D", # pydocstyle/flake8-docstrings
"I002", # isort: missing-required-import
@@ -292,32 +298,32 @@ target-version = "py37"
"singer_sdk/testing/*.py" = ["S101"]
"singer_sdk/testing/templates.py" = ["ANN401"]
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
mypy-init-return = true
suppress-dummy-args = true
-[tool.ruff.flake8-import-conventions]
+[tool.ruff.lint.flake8-import-conventions]
banned-from = ["typing"]
-[tool.ruff.flake8-import-conventions.extend-aliases]
+[tool.ruff.lint.flake8-import-conventions.extend-aliases]
typing = "t"
-[tool.ruff.flake8-pytest-style]
+[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
parametrize-names-type = "csv"
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["singer_sdk", "samples", "tests"]
required-imports = ["from __future__ import annotations"]
-[tool.ruff.pep8-naming]
+[tool.ruff.lint.pep8-naming]
classmethod-decorators = [
"singer_sdk.cli.plugin_cli",
]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
-[tool.ruff.pylint]
+[tool.ruff.lint.pylint]
max-args = 9
diff --git a/samples/sample_custom_sql_adapter/__init__.py b/samples/sample_custom_sql_adapter/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/samples/sample_custom_sql_adapter/connector.py b/samples/sample_custom_sql_adapter/connector.py
new file mode 100644
index 0000000000..6f7745a73b
--- /dev/null
+++ b/samples/sample_custom_sql_adapter/connector.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+import typing as t
+
+from sqlalchemy.engine.default import DefaultDialect
+
+if t.TYPE_CHECKING:
+ from types import ModuleType
+
+
+class CustomSQLDialect(DefaultDialect):
+ """Custom SQLite dialect that supports JSON."""
+
+ name = "myrdbms"
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def import_dbapi(cls):
+ """Import the sqlite3 DBAPI."""
+ import sqlite3
+
+ return sqlite3
+
+ @classmethod
+ def dbapi(cls) -> ModuleType: # type: ignore[override]
+ """Return the DBAPI module.
+
+ NOTE: This is a legacy method that will stop being used by SQLAlchemy at some point.
+ """ # noqa: E501
+ return cls.import_dbapi()
diff --git a/samples/sample_tap_hostile/hostile_streams.py b/samples/sample_tap_hostile/hostile_streams.py
index e711b769f0..0da506242e 100644
--- a/samples/sample_tap_hostile/hostile_streams.py
+++ b/samples/sample_tap_hostile/hostile_streams.py
@@ -28,9 +28,7 @@ class HostilePropertyNamesStream(Stream):
@staticmethod
def get_random_lowercase_string():
- return "".join(
- random.choice(string.ascii_lowercase) for _ in range(10) # noqa: S311
- )
+ return "".join(random.choice(string.ascii_lowercase) for _ in range(10)) # noqa: S311
def get_records(
self,
diff --git a/samples/sample_tap_sqlite/__init__.py b/samples/sample_tap_sqlite/__init__.py
index e4a14b3a57..3aed5d21d0 100644
--- a/samples/sample_tap_sqlite/__init__.py
+++ b/samples/sample_tap_sqlite/__init__.py
@@ -34,6 +34,9 @@ class SQLiteStream(SQLStream):
connector_class = SQLiteConnector
+ # Use a smaller state message frequency to check intermediate state.
+ STATE_MSG_FREQUENCY = 10
+
class SQLiteTap(SQLTap):
"""The Tap class for SQLite."""
diff --git a/samples/sample_target_sqlite/__init__.py b/samples/sample_target_sqlite/__init__.py
index 4cd6ddd61f..8e43a5e87f 100644
--- a/samples/sample_target_sqlite/__init__.py
+++ b/samples/sample_target_sqlite/__init__.py
@@ -19,6 +19,7 @@ class SQLiteConnector(SQLConnector):
allow_temp_tables = False
allow_column_alter = False
allow_merge_upsert = True
+ allow_overwrite: bool = True
def get_sqlalchemy_url(self, config: dict[str, t.Any]) -> str:
"""Generates a SQLAlchemy URL for SQLite."""
diff --git a/singer_sdk/_singerlib/catalog.py b/singer_sdk/_singerlib/catalog.py
index 77fe884d8f..87b528466a 100644
--- a/singer_sdk/_singerlib/catalog.py
+++ b/singer_sdk/_singerlib/catalog.py
@@ -31,11 +31,7 @@ def __missing__(self, breadcrumb: Breadcrumb) -> bool:
Returns:
True if the breadcrumb is selected, False otherwise.
"""
- if len(breadcrumb) >= 2: # noqa: PLR2004
- parent = breadcrumb[:-2]
- return self[parent]
-
- return True
+ return self[breadcrumb[:-2]] if len(breadcrumb) >= 2 else True # noqa: PLR2004
@dataclass
@@ -71,7 +67,7 @@ def from_dict(cls: type[Metadata], value: dict[str, t.Any]) -> Metadata:
)
def to_dict(self) -> dict[str, t.Any]:
- """Convert metadata to a JSON-encodeable dictionary.
+ """Convert metadata to a JSON-encodable dictionary.
Returns:
Metadata object.
diff --git a/singer_sdk/_singerlib/messages.py b/singer_sdk/_singerlib/messages.py
index 7fc17e57dc..02c0f30f2a 100644
--- a/singer_sdk/_singerlib/messages.py
+++ b/singer_sdk/_singerlib/messages.py
@@ -6,9 +6,10 @@
import sys
import typing as t
from dataclasses import asdict, dataclass, field
+from datetime import timezone
-import pytz
import simplejson as json
+from dateutil.parser import parse
if t.TYPE_CHECKING:
from datetime import datetime
@@ -84,6 +85,27 @@ class RecordMessage(Message):
time_extracted: datetime | None = None
"""The time the record was extracted."""
+ @classmethod
+ def from_dict(cls: type[RecordMessage], data: dict[str, t.Any]) -> RecordMessage:
+ """Create a record message from a dictionary.
+
+ This overrides the default conversion logic, since it uses unnecessary
+ deep copying and is very slow.
+
+ Args:
+ data: The dictionary to create the message from.
+
+ Returns:
+ The created message.
+ """
+ time_extracted = data.get("time_extracted")
+ return cls(
+ stream=data["stream"],
+ record=data["record"],
+ version=data.get("version"),
+ time_extracted=parse(time_extracted) if time_extracted else None,
+ )
+
def to_dict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the message.
@@ -119,7 +141,7 @@ def __post_init__(self) -> None:
raise ValueError(msg)
if self.time_extracted:
- self.time_extracted = self.time_extracted.astimezone(pytz.utc)
+ self.time_extracted = self.time_extracted.astimezone(timezone.utc)
@dataclass
diff --git a/singer_sdk/_singerlib/schema.py b/singer_sdk/_singerlib/schema.py
index 9ef615e0d5..41dd8104b4 100644
--- a/singer_sdk/_singerlib/schema.py
+++ b/singer_sdk/_singerlib/schema.py
@@ -21,6 +21,7 @@
"minLength",
"format",
"type",
+ "default",
"required",
"enum",
"pattern",
@@ -47,6 +48,7 @@ class Schema:
"""
type: str | list[str] | None = None # noqa: A003
+ default: t.Any | None = None
properties: dict | None = None
items: t.Any | None = None
description: str | None = None
diff --git a/singer_sdk/authenticators.py b/singer_sdk/authenticators.py
index 61382daba3..fcba67e7b6 100644
--- a/singer_sdk/authenticators.py
+++ b/singer_sdk/authenticators.py
@@ -5,7 +5,7 @@
import base64
import math
import typing as t
-from datetime import datetime, timedelta
+from datetime import timedelta
from types import MappingProxyType
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
@@ -19,6 +19,8 @@
if t.TYPE_CHECKING:
import logging
+ from pendulum import DateTime
+
from singer_sdk.streams.rest import RESTStream
@@ -378,7 +380,7 @@ def __init__(
# Initialize internal tracking attributes
self.access_token: str | None = None
self.refresh_token: str | None = None
- self.last_refreshed: datetime | None = None
+ self.last_refreshed: DateTime | None = None
self.expires_in: int | None = None
@property
@@ -462,9 +464,7 @@ def client_id(self) -> str | None:
Returns:
Optional client secret from stream config if it has been set.
"""
- if self.config:
- return self.config.get("client_id")
- return None
+ return self.config.get("client_id") if self.config else None
@property
def client_secret(self) -> str | None:
@@ -473,9 +473,7 @@ def client_secret(self) -> str | None:
Returns:
Optional client secret from stream config if it has been set.
"""
- if self.config:
- return self.config.get("client_secret")
- return None
+ return self.config.get("client_secret") if self.config else None
def is_token_valid(self) -> bool:
"""Check if token is valid.
@@ -487,9 +485,7 @@ def is_token_valid(self) -> bool:
return False
if not self.expires_in:
return True
- if self.expires_in > (utc_now() - self.last_refreshed).total_seconds():
- return True
- return False
+ return self.expires_in > (utc_now() - self.last_refreshed).total_seconds() # type: ignore[no-any-return]
# Authentication and refresh
def update_access_token(self) -> None:
@@ -520,7 +516,7 @@ def update_access_token(self) -> None:
self.expires_in = int(expiration) if expiration else None
if self.expires_in is None:
self.logger.debug(
- "No expires_in receied in OAuth response and no "
+ "No expires_in received in OAuth response and no "
"default_expiration set. Token will be treated as if it never "
"expires.",
)
@@ -566,7 +562,7 @@ def oauth_request_body(self) -> dict:
@property
def oauth_request_payload(self) -> dict:
- """Return request paytload for OAuth request.
+ """Return request payload for OAuth request.
Returns:
Payload object for OAuth.
diff --git a/singer_sdk/connectors/sql.py b/singer_sdk/connectors/sql.py
index aecfbb0c10..91846d46a4 100644
--- a/singer_sdk/connectors/sql.py
+++ b/singer_sdk/connectors/sql.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+import decimal
+import json
import logging
import typing as t
import warnings
@@ -9,12 +11,14 @@
from datetime import datetime
from functools import lru_cache
+import simplejson
import sqlalchemy
from sqlalchemy.engine import Engine
from singer_sdk import typing as th
from singer_sdk._singerlib import CatalogEntry, MetadataMapping, Schema
from singer_sdk.exceptions import ConfigValidationError
+from singer_sdk.helpers.capabilities import TargetLoadMethods
if t.TYPE_CHECKING:
from sqlalchemy.engine.reflection import Inspector
@@ -37,6 +41,7 @@ class SQLConnector:
allow_column_rename: bool = True # Whether RENAME COLUMN is supported.
allow_column_alter: bool = False # Whether altering column types is supported.
allow_merge_upsert: bool = False # Whether MERGE UPSERT is supported.
+ allow_overwrite: bool = False # Whether overwrite load method is supported.
allow_temp_tables: bool = True # Whether temp tables are supported.
_cached_engine: Engine | None = None
@@ -53,6 +58,8 @@ def __init__(
"""
self._config: dict[str, t.Any] = config or {}
self._sqlalchemy_url: str | None = sqlalchemy_url or None
+ self._table_cols_cache: dict[str, dict[str, sqlalchemy.Column]] = {}
+ self._schema_cache: set[str] = set()
@property
def config(self) -> dict:
@@ -179,7 +186,7 @@ def get_sqlalchemy_url(self, config: dict[str, t.Any]) -> str:
@staticmethod
def to_jsonschema_type(
sql_type: (
- str
+ str # noqa: ANN401
| sqlalchemy.types.TypeEngine
| type[sqlalchemy.types.TypeEngine]
| t.Any
@@ -316,7 +323,21 @@ def create_engine(self) -> Engine:
Returns:
A new SQLAlchemy Engine.
"""
- return sqlalchemy.create_engine(self.sqlalchemy_url, echo=False)
+ try:
+ return sqlalchemy.create_engine(
+ self.sqlalchemy_url,
+ echo=False,
+ json_serializer=self.serialize_json,
+ json_deserializer=self.deserialize_json,
+ )
+ except TypeError:
+ self.logger.exception(
+ "Retrying engine creation with fewer arguments due to TypeError.",
+ )
+ return sqlalchemy.create_engine(
+ self.sqlalchemy_url,
+ echo=False,
+ )
def quote(self, name: str) -> str:
"""Quote a name if it needs quoting, using '.' as a name-part delimiter.
@@ -565,8 +586,12 @@ def schema_exists(self, schema_name: str) -> bool:
Returns:
True if the database schema exists, False if not.
"""
- schema_names = sqlalchemy.inspect(self._engine).get_schema_names()
- return schema_name in schema_names
+ if schema_name not in self._schema_cache:
+ self._schema_cache = set(
+ sqlalchemy.inspect(self._engine).get_schema_names(),
+ )
+
+ return schema_name in self._schema_cache
def get_table_columns(
self,
@@ -582,20 +607,24 @@ def get_table_columns(
Returns:
An ordered list of column objects.
"""
- _, schema_name, table_name = self.parse_full_table_name(full_table_name)
- inspector = sqlalchemy.inspect(self._engine)
- columns = inspector.get_columns(table_name, schema_name)
-
- return {
- col_meta["name"]: sqlalchemy.Column(
- col_meta["name"],
- col_meta["type"],
- nullable=col_meta.get("nullable", False),
- )
- for col_meta in columns
- if not column_names
- or col_meta["name"].casefold() in {col.casefold() for col in column_names}
- }
+ if full_table_name not in self._table_cols_cache:
+ _, schema_name, table_name = self.parse_full_table_name(full_table_name)
+ inspector = sqlalchemy.inspect(self._engine)
+ columns = inspector.get_columns(table_name, schema_name)
+
+ self._table_cols_cache[full_table_name] = {
+ col_meta["name"]: sqlalchemy.Column(
+ col_meta["name"],
+ col_meta["type"],
+ nullable=col_meta.get("nullable", False),
+ )
+ for col_meta in columns
+ if not column_names
+ or col_meta["name"].casefold()
+ in {col.casefold() for col in column_names}
+ }
+
+ return self._table_cols_cache[full_table_name]
def get_table(
self,
@@ -642,7 +671,7 @@ def create_schema(self, schema_name: str) -> None:
Args:
schema_name: The target schema to create.
"""
- with self._connect() as conn:
+ with self._connect() as conn, conn.begin():
conn.execute(sqlalchemy.schema.CreateSchema(schema_name))
def create_empty_table(
@@ -758,6 +787,16 @@ def prepare_table(
as_temp_table=as_temp_table,
)
return
+ if self.config["load_method"] == TargetLoadMethods.OVERWRITE:
+ self.get_table(full_table_name=full_table_name).drop(self._engine)
+ self.create_empty_table(
+ full_table_name=full_table_name,
+ schema=schema,
+ primary_keys=primary_keys,
+ partition_keys=partition_keys,
+ as_temp_table=as_temp_table,
+ )
+ return
for property_name, property_def in schema["properties"].items():
self.prepare_column(
@@ -813,7 +852,7 @@ def rename_column(self, full_table_name: str, old_name: str, new_name: str) -> N
column_name=old_name,
new_column_name=new_name,
)
- with self._connect() as conn:
+ with self._connect() as conn, conn.begin():
conn.execute(column_rename_ddl)
def merge_sql_types(
@@ -863,9 +902,6 @@ def merge_sql_types(
if issubclass(
generic_type,
(sqlalchemy.types.String, sqlalchemy.types.Unicode),
- ) or issubclass(
- generic_type,
- (sqlalchemy.types.String, sqlalchemy.types.Unicode),
):
# If length None or 0 then is varchar max ?
if (
@@ -1122,5 +1158,37 @@ def _adapt_column_type(
column_name=column_name,
column_type=compatible_sql_type,
)
- with self._connect() as conn:
+ with self._connect() as conn, conn.begin():
conn.execute(alter_column_ddl)
+
+ def serialize_json(self, obj: object) -> str:
+ """Serialize an object to a JSON string.
+
+ Target connectors may override this method to provide custom serialization logic
+ for JSON types.
+
+ Args:
+ obj: The object to serialize.
+
+ Returns:
+ The JSON string.
+
+ .. versionadded:: 0.31.0
+ """
+ return simplejson.dumps(obj, use_decimal=True)
+
+ def deserialize_json(self, json_str: str) -> object:
+ """Deserialize a JSON string to an object.
+
+ Tap connectors may override this method to provide custom deserialization
+ logic for JSON types.
+
+ Args:
+ json_str: The JSON string to deserialize.
+
+ Returns:
+ The deserialized object.
+
+ .. versionadded:: 0.31.0
+ """
+ return json.loads(json_str, parse_float=decimal.Decimal)
diff --git a/singer_sdk/exceptions.py b/singer_sdk/exceptions.py
index 23325aa2ae..75135e800a 100644
--- a/singer_sdk/exceptions.py
+++ b/singer_sdk/exceptions.py
@@ -12,11 +12,30 @@
class ConfigValidationError(Exception):
"""Raised when a user's config settings fail validation."""
+ def __init__(
+ self,
+ message: str,
+ *,
+ errors: list[str] | None = None,
+ ) -> None:
+ """Initialize a ConfigValidationError.
+
+ Args:
+ message: A message describing the error.
+ errors: A list of errors which caused the validation error.
+ """
+ super().__init__(message)
+ self.errors = errors or []
+
class FatalAPIError(Exception):
"""Exception raised when a failed request should not be considered retriable."""
+class InvalidReplicationKeyException(Exception):
+ """Exception to raise if the replication key is not in the stream properties."""
+
+
class InvalidStreamSortException(Exception):
"""Exception to raise if sorting errors are found while syncing the records."""
diff --git a/singer_sdk/helpers/_compat.py b/singer_sdk/helpers/_compat.py
index 87033ea4c5..20b7a399a1 100644
--- a/singer_sdk/helpers/_compat.py
+++ b/singer_sdk/helpers/_compat.py
@@ -11,4 +11,9 @@
from importlib import metadata
from typing import final # noqa: ICN003
-__all__ = ["metadata", "final"]
+if sys.version_info < (3, 9):
+ import importlib_resources as resources
+else:
+ from importlib import resources
+
+__all__ = ["metadata", "final", "resources"]
diff --git a/singer_sdk/helpers/_conformers.py b/singer_sdk/helpers/_conformers.py
index 46963284e2..0ca70e85c4 100644
--- a/singer_sdk/helpers/_conformers.py
+++ b/singer_sdk/helpers/_conformers.py
@@ -16,11 +16,13 @@ def snakecase(string: str) -> str:
"""
string = re.sub(r"[\-\.\s]", "_", string)
string = (
- string[0].lower()
- + re.sub(
- r"[A-Z]",
- lambda matched: "_" + str(matched.group(0).lower()),
- string[1:],
+ (
+ string[0].lower()
+ + re.sub(
+ r"[A-Z]",
+ lambda matched: f"_{matched.group(0).lower()!s}",
+ string[1:],
+ )
)
if string
else string
diff --git a/singer_sdk/helpers/_flattening.py b/singer_sdk/helpers/_flattening.py
index 29ef35cc2e..eeb244277d 100644
--- a/singer_sdk/helpers/_flattening.py
+++ b/singer_sdk/helpers/_flattening.py
@@ -4,12 +4,12 @@
import collections
import itertools
-import json
import re
import typing as t
from copy import deepcopy
import inflection
+import simplejson as json
DEFAULT_FLATTENING_SEPARATOR = "__"
@@ -155,17 +155,7 @@ def flatten_schema(
"type": "string"
},
"foo__bar": {
- "type": "object",
- "properties": {
- "baz": {
- "type": "object",
- "properties": {
- "qux": {
- "type": "string"
- }
- }
- }
- }
+ "type": "string"
}
}
}
@@ -178,25 +168,115 @@ def flatten_schema(
"type": "string"
},
"foo__bar__baz": {
- "type": "object",
+ "type": "string"
+ }
+ }
+ }
+
+ >>> print(json.dumps(flatten_schema(schema, 3), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "foo__bar__baz__qux": {
+ "type": "string"
+ }
+ }
+ }
+
+ >>> nullable_leaves_schema = {
+ ... "type": "object",
+ ... "properties": {
+ ... "id": {
+ ... "type": "string"
+ ... },
+ ... "foo": {
+ ... "type": ["object", "null"],
+ ... "properties": {
+ ... "bar": {
+ ... "type": ["object", "null"],
+ ... "properties": {
+ ... "baz": {
+ ... "type": ["object", "null"],
+ ... "properties": {
+ ... "qux": {
+ ... "type": "string"
+ ... }
+ ... }
+ ... }
+ ... }
+ ... }
+ ... }
+ ... }
+ ... }
+ ... }
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 0), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "foo": {
+ "type": [
+ "object",
+ "null"
+ ],
"properties": {
- "qux": {
- "type": "string"
+ "bar": {
+ "type": [
+ "object",
+ "null"
+ ],
+ "properties": {
+ "baz": {
+ "type": [
+ "object",
+ "null"
+ ],
+ "properties": {
+ "qux": {
+ "type": "string"
+ }
+ }
+ }
+ }
}
}
}
}
}
- >>> print(json.dumps(flatten_schema(schema, 3), indent=2))
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 1), indent=2))
{
"type": "object",
"properties": {
"id": {
"type": "string"
},
- "foo__bar__baz__qux": {
+ "foo__bar": {
+ "type": [
+ "string",
+ "null"
+ ]
+ }
+ }
+ }
+
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 2), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
"type": "string"
+ },
+ "foo__bar__baz": {
+ "type": [
+ "string",
+ "null"
+ ]
}
}
}
@@ -210,7 +290,7 @@ def flatten_schema(
return new_schema
-def _flatten_schema( # noqa: C901
+def _flatten_schema( # noqa: C901, PLR0912
schema_node: dict,
parent_keys: list[str] | None = None,
separator: str = "__",
@@ -236,40 +316,55 @@ def _flatten_schema( # noqa: C901
if "properties" not in schema_node:
return {}
- for k, v in schema_node["properties"].items():
- new_key = flatten_key(k, parent_keys, separator)
- if "type" in v:
- if "object" in v["type"] and "properties" in v and level < max_level:
+ for field_name, field_schema in schema_node["properties"].items():
+ new_key = flatten_key(field_name, parent_keys, separator)
+ if "type" in field_schema:
+ if (
+ "object" in field_schema["type"]
+ and "properties" in field_schema
+ and level < max_level
+ ):
items.extend(
_flatten_schema(
- v,
- [*parent_keys, k],
+ field_schema,
+ [*parent_keys, field_name],
separator=separator,
level=level + 1,
max_level=max_level,
).items(),
)
+ elif (
+ "array" in field_schema["type"]
+ or "object" in field_schema["type"]
+ and max_level > 0
+ ):
+ types = (
+ ["string", "null"] if "null" in field_schema["type"] else "string"
+ )
+ items.append((new_key, {"type": types}))
else:
- items.append((new_key, v))
- elif len(v.values()) > 0:
- if list(v.values())[0][0]["type"] == "string":
- list(v.values())[0][0]["type"] = ["null", "string"]
- items.append((new_key, list(v.values())[0][0]))
- elif list(v.values())[0][0]["type"] == "array":
- list(v.values())[0][0]["type"] = ["null", "array"]
- items.append((new_key, list(v.values())[0][0]))
- elif list(v.values())[0][0]["type"] == "object":
- list(v.values())[0][0]["type"] = ["null", "object"]
- items.append((new_key, list(v.values())[0][0]))
+ items.append((new_key, field_schema))
+ # TODO: Figure out what this really does, try breaking it.
+ # If it's not needed, remove it.
+ elif len(field_schema.values()) > 0:
+ if next(iter(field_schema.values()))[0]["type"] == "string":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "string"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
+ elif next(iter(field_schema.values()))[0]["type"] == "array":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "array"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
+ elif next(iter(field_schema.values()))[0]["type"] == "object":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "object"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
# Sort and check for duplicates
def _key_func(item):
return item[0] # first item is tuple is the key name.
sorted_items = sorted(items, key=_key_func)
- for k, g in itertools.groupby(sorted_items, key=_key_func):
+ for field_name, g in itertools.groupby(sorted_items, key=_key_func):
if len(list(g)) > 1:
- msg = f"Duplicate column name produced in schema: {k}"
+ msg = f"Duplicate column name produced in schema: {field_name}"
raise ValueError(msg)
# Return the (unsorted) result as a dict.
@@ -347,7 +442,7 @@ def _flatten_record(
items.append(
(
new_key,
- json.dumps(v)
+ json.dumps(v, use_decimal=True)
if _should_jsondump_value(k, v, flattened_schema)
else v,
),
@@ -370,12 +465,9 @@ def _should_jsondump_value(key: str, value: t.Any, flattened_schema=None) -> boo
if isinstance(value, (dict, list)):
return True
- if (
+ return bool(
flattened_schema
and key in flattened_schema
and "type" in flattened_schema[key]
and set(flattened_schema[key]["type"]) == {"null", "object", "array"}
- ):
- return True
-
- return False
+ )
diff --git a/singer_sdk/helpers/_simpleeval.py b/singer_sdk/helpers/_simpleeval.py
deleted file mode 100644
index c3fb41c3fe..0000000000
--- a/singer_sdk/helpers/_simpleeval.py
+++ /dev/null
@@ -1,679 +0,0 @@
-"""
-Simpleeval module originally imported on 2021-09-16 from:
-- https://github.com/danthedeckie/simpleeval
-
-For more information:
-- https://gitlab.com/meltano/sdk/-/issues/213
-
--------------------------------------
-SimpleEval - (C) 2013-2019 Daniel Fairhead
--------------------------------------
-
-An short, easy to use, safe and reasonably extensible expression evaluator.
-Designed for things like in a website where you want to allow the user to
-generate a string, or a number from some other input, without allowing full
-eval() or other unsafe or needlessly complex linguistics.
-
--------------------------------------
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
--------------------------------------
-
-Initial idea copied from J.F. Sebastian on Stack Overflow
-( http://stackoverflow.com/a/9558001/1973500 ) with
-modifications and many improvements.
-
--------------------------------------
-Contributors:
-- corro (Robin Baumgartner) (py3k)
-- dratchkov (David R) (nested dicts)
-- marky1991 (Mark Young) (slicing)
-- T045T (Nils Berg) (!=, py3kstr, obj.
-- perkinslr (Logan Perkins) (.__globals__ or .func_ breakouts)
-- impala2 (Kirill Stepanov) (massive _eval refactor)
-- gk (ugik) (Other iterables than str can DOS too, and can be made)
-- daveisfera (Dave Johansen) 'not' Boolean op, Pycharm, pep8, various other fixes
-- xaled (Khalid Grandi) method chaining correctly, double-eval bugfix.
-- EdwardBetts (Edward Betts) spelling correction.
-- charlax (Charles-Axel Dein charlax) Makefile and cleanups
-- mommothazaz123 (Andrew Zhu) f"string" support, Python 3.8 support
-- lubieowoce (Uryga) various potential vulnerabilities
-- JCavallo (Jean Cavallo) names dict shouldn't be modified
-- Birne94 (Daniel Birnstiel) for fixing leaking generators.
-- patricksurry (Patrick Surry) or should return last value, even if falsy.
-- shughes-uk (Samantha Hughes) python w/o 'site' should not fail to import.
-
--------------------------------------
-Basic Usage:
-
->>> s = SimpleEval()
->>> s.eval("20 + 30")
-50
-
-You can add your own functions easily too:
-
-if file.txt contents is "11"
-
->>> def get_file():
-... with open("file.txt", 'r') as f:
-... return f.read()
-
->>> s.functions["get_file"] = get_file
->>> s.eval("int(get_file()) + 31")
-42
-
-For more information, see the full package documentation on pypi, or the github
-repo.
-
------------
-
-If you don't need to re-use the evaluator (with it's names, functions, etc),
-then you can use the simple_eval() function:
-
->>> simple_eval("21 + 19")
-40
-
-You can pass names, operators and functions to the simple_eval function as
-well:
-
->>> simple_eval("40 + two", names={"two": 2})
-42
-
-"""
-# flake8: noqa # Ignoring flake errors in imported module
-# isort: dont-add-imports
-
-import ast
-import operator as op
-import sys
-import warnings
-from random import random
-
-PYTHON3 = sys.version_info[0] == 3
-
-########################################
-# Module wide 'globals'
-
-MAX_STRING_LENGTH = 100000
-MAX_COMPREHENSION_LENGTH = 10000
-MAX_POWER = 4000000 # highest exponent
-DISALLOW_PREFIXES = ["_", "func_"]
-DISALLOW_METHODS = ["format", "format_map", "mro"]
-
-# Disallow functions:
-# This, strictly speaking, is not necessary. These /should/ never be accessable anyway,
-# if DISALLOW_PREFIXES and DISALLOW_METHODS are all right. This is here to try and help
-# people not be stupid. Allowing these functions opens up all sorts of holes - if any of
-# their functionality is required, then please wrap them up in a safe container. And think
-# very hard about it first. And don't say I didn't warn you.
-# builtins is a dict in python >3.6 but a module before
-DISALLOW_FUNCTIONS = {type, isinstance, eval, getattr, setattr, repr, compile, open}
-if hasattr(__builtins__, "help") or (
- hasattr(__builtins__, "__contains__") and "help" in __builtins__
-):
- # PyInstaller environment doesn't include this module.
- DISALLOW_FUNCTIONS.add(help)
-
-
-if PYTHON3:
- exec("DISALLOW_FUNCTIONS.add(exec)") # exec is not a function in Python2...
-
-
-########################################
-# Exceptions:
-
-
-class InvalidExpression(Exception):
- """ Generic Exception """
-
- pass
-
-
-class FunctionNotDefined(InvalidExpression):
- """ sorry! That function isn't defined! """
-
- def __init__(self, func_name, expression):
- self.message = "Function '{0}' not defined," " for expression '{1}'.".format(
- func_name, expression
- )
- setattr(self, "func_name", func_name) # bypass 2to3 confusion.
- self.expression = expression
-
- # pylint: disable=bad-super-call
- super(InvalidExpression, self).__init__(self.message)
-
-
-class NameNotDefined(InvalidExpression):
- """ a name isn't defined. """
-
- def __init__(self, name, expression):
- self.name = name
- self.message = "'{0}' is not defined for expression '{1}'".format(
- name, expression
- )
- self.expression = expression
-
- # pylint: disable=bad-super-call
- super(InvalidExpression, self).__init__(self.message)
-
-
-class AttributeDoesNotExist(InvalidExpression):
- """attribute does not exist"""
-
- def __init__(self, attr, expression):
- self.message = "Attribute '{0}' does not exist in expression '{1}'".format(
- attr, expression
- )
- self.attr = attr
- self.expression = expression
-
-
-class FeatureNotAvailable(InvalidExpression):
- """ What you're trying to do is not allowed. """
-
- pass
-
-
-class NumberTooHigh(InvalidExpression):
- """Sorry! That number is too high. I don't want to spend the
- next 10 years evaluating this expression!"""
-
- pass
-
-
-class IterableTooLong(InvalidExpression):
- """ That iterable is **way** too long, baby. """
-
- pass
-
-
-class AssignmentAttempted(UserWarning):
- pass
-
-
-########################################
-# Default simple functions to include:
-
-
-def random_int(top):
- """ return a random int below """
-
- return int(random() * top)
-
-
-def safe_power(a, b): # pylint: disable=invalid-name
- """ a limited exponent/to-the-power-of function, for safety reasons """
-
- if abs(a) > MAX_POWER or abs(b) > MAX_POWER:
- raise NumberTooHigh("Sorry! I don't want to evaluate {0} ** {1}".format(a, b))
- return a ** b
-
-
-def safe_mult(a, b): # pylint: disable=invalid-name
- """ limit the number of times an iterable can be repeated... """
-
- if hasattr(a, "__len__") and b * len(a) > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evalute something that long.")
- if hasattr(b, "__len__") and a * len(b) > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evalute something that long.")
-
- return a * b
-
-
-def safe_add(a, b): # pylint: disable=invalid-name
- """ iterable length limit again """
-
- if hasattr(a, "__len__") and hasattr(b, "__len__"):
- if len(a) + len(b) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "Sorry, adding those two together would" " make something too long."
- )
- return a + b
-
-
-########################################
-# Defaults for the evaluator:
-
-DEFAULT_OPERATORS = {
- ast.Add: safe_add,
- ast.Sub: op.sub,
- ast.Mult: safe_mult,
- ast.Div: op.truediv,
- ast.FloorDiv: op.floordiv,
- ast.Pow: safe_power,
- ast.Mod: op.mod,
- ast.Eq: op.eq,
- ast.NotEq: op.ne,
- ast.Gt: op.gt,
- ast.Lt: op.lt,
- ast.GtE: op.ge,
- ast.LtE: op.le,
- ast.Not: op.not_,
- ast.USub: op.neg,
- ast.UAdd: op.pos,
- ast.In: lambda x, y: op.contains(y, x),
- ast.NotIn: lambda x, y: not op.contains(y, x),
- ast.Is: lambda x, y: x is y,
- ast.IsNot: lambda x, y: x is not y,
-}
-
-DEFAULT_FUNCTIONS = {
- "rand": random,
- "randint": random_int,
- "int": int,
- "float": float,
- "str": str if PYTHON3 else unicode, # type: ignore # 'unicode' not defined
-}
-
-DEFAULT_NAMES = {"True": True, "False": False, "None": None}
-
-ATTR_INDEX_FALLBACK = True
-
-
-########################################
-# And the actual evaluator:
-
-
-class SimpleEval(object): # pylint: disable=too-few-public-methods
- """A very simple expression parser.
- >>> s = SimpleEval()
- >>> s.eval("20 + 30 - ( 10 * 5)")
- 0
- """
-
- expr = ""
-
- def __init__(self, operators=None, functions=None, names=None):
- """
- Create the evaluator instance. Set up valid operators (+,-, etc)
- functions (add, random, get_val, whatever) and names."""
-
- if not operators:
- operators = DEFAULT_OPERATORS.copy()
- if not functions:
- functions = DEFAULT_FUNCTIONS.copy()
- if not names:
- names = DEFAULT_NAMES.copy()
-
- self.operators = operators
- self.functions = functions
- self.names = names
-
- self.nodes = {
- ast.Expr: self._eval_expr,
- ast.Assign: self._eval_assign,
- ast.AugAssign: self._eval_aug_assign,
- ast.Import: self._eval_import,
- ast.Num: self._eval_num,
- ast.Str: self._eval_str,
- ast.Name: self._eval_name,
- ast.UnaryOp: self._eval_unaryop,
- ast.BinOp: self._eval_binop,
- ast.BoolOp: self._eval_boolop,
- ast.Compare: self._eval_compare,
- ast.IfExp: self._eval_ifexp,
- ast.Call: self._eval_call,
- ast.keyword: self._eval_keyword,
- ast.Subscript: self._eval_subscript,
- ast.Attribute: self._eval_attribute,
- ast.Index: self._eval_index,
- ast.Slice: self._eval_slice,
- }
-
- # py3k stuff:
- if hasattr(ast, "NameConstant"):
- self.nodes[ast.NameConstant] = self._eval_constant
-
- # py3.6, f-strings
- if hasattr(ast, "JoinedStr"):
- self.nodes[ast.JoinedStr] = self._eval_joinedstr # f-string
- self.nodes[
- ast.FormattedValue
- ] = self._eval_formattedvalue # formatted value in f-string
-
- # py3.8 uses ast.Constant instead of ast.Num, ast.Str, ast.NameConstant
- if hasattr(ast, "Constant"):
- self.nodes[ast.Constant] = self._eval_constant
-
- # Defaults:
-
- self.ATTR_INDEX_FALLBACK = ATTR_INDEX_FALLBACK
-
- # Check for forbidden functions:
-
- for f in self.functions.values():
- if f in DISALLOW_FUNCTIONS:
- raise FeatureNotAvailable(
- "This function {} is a really bad idea.".format(f)
- )
-
- def eval(self, expr):
- """evaluate an expresssion, using the operators, functions and
- names previously set up."""
-
- # set a copy of the expression aside, so we can give nice errors...
-
- self.expr = expr
-
- # and evaluate:
- return self._eval(ast.parse(expr.strip()).body[0])
-
- def _eval(self, node):
- """ The internal evaluator used on each node in the parsed tree. """
-
- try:
- handler = self.nodes[type(node)]
- except KeyError:
- raise FeatureNotAvailable(
- "Sorry, {0} is not available in this "
- "evaluator".format(type(node).__name__)
- )
-
- return handler(node)
-
- def _eval_expr(self, node):
- return self._eval(node.value)
-
- def _eval_assign(self, node):
- warnings.warn(
- "Assignment ({}) attempted, but this is ignored".format(self.expr),
- AssignmentAttempted,
- )
- return self._eval(node.value)
-
- def _eval_aug_assign(self, node):
- warnings.warn(
- "Assignment ({}) attempted, but this is ignored".format(self.expr),
- AssignmentAttempted,
- )
- return self._eval(node.value)
-
- def _eval_import(self, node):
- raise FeatureNotAvailable("Sorry, 'import' is not allowed.")
- return self._eval(node.value)
-
- @staticmethod
- def _eval_num(node):
- return node.n
-
- @staticmethod
- def _eval_str(node):
- if len(node.s) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "String Literal in statement is too long!"
- " ({0}, when {1} is max)".format(len(node.s), MAX_STRING_LENGTH)
- )
- return node.s
-
- @staticmethod
- def _eval_constant(node):
- if hasattr(node.value, "__len__") and len(node.value) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "Literal in statement is too long!"
- " ({0}, when {1} is max)".format(len(node.value), MAX_STRING_LENGTH)
- )
- return node.value
-
- def _eval_unaryop(self, node):
- return self.operators[type(node.op)](self._eval(node.operand))
-
- def _eval_binop(self, node):
- return self.operators[type(node.op)](
- self._eval(node.left), self._eval(node.right)
- )
-
- def _eval_boolop(self, node):
- if isinstance(node.op, ast.And):
- vout = False
- for value in node.values:
- vout = self._eval(value)
- if not vout:
- return vout
- return vout
- elif isinstance(node.op, ast.Or):
- for value in node.values:
- vout = self._eval(value)
- if vout:
- return vout
- return vout
-
- def _eval_compare(self, node):
- right = self._eval(node.left)
- to_return = True
- for operation, comp in zip(node.ops, node.comparators):
- if not to_return:
- break
- left = right
- right = self._eval(comp)
- to_return = self.operators[type(operation)](left, right)
- return to_return
-
- def _eval_ifexp(self, node):
- return (
- self._eval(node.body) if self._eval(node.test) else self._eval(node.orelse)
- )
-
- def _eval_call(self, node):
- if isinstance(node.func, ast.Attribute):
- func = self._eval(node.func)
- else:
- try:
- func = self.functions[node.func.id]
- except KeyError:
- raise FunctionNotDefined(node.func.id, self.expr)
- except AttributeError as e:
- raise FeatureNotAvailable("Lambda Functions not implemented")
-
- if func in DISALLOW_FUNCTIONS:
- raise FeatureNotAvailable("This function is forbidden")
-
- return func(
- *(self._eval(a) for a in node.args),
- **dict(self._eval(k) for k in node.keywords)
- )
-
- def _eval_keyword(self, node):
- return node.arg, self._eval(node.value)
-
- def _eval_name(self, node):
- try:
- # This happens at least for slicing
- # This is a safe thing to do because it is impossible
- # that there is a true exression assigning to none
- # (the compiler rejects it, so you can't even
- # pass that to ast.parse)
- if hasattr(self.names, "__getitem__"):
- return self.names[node.id]
- elif callable(self.names):
- return self.names(node)
- else:
- raise InvalidExpression(
- 'Trying to use name (variable) "{0}"'
- ' when no "names" defined for'
- " evaluator".format(node.id)
- )
-
- except KeyError:
- if node.id in self.functions:
- return self.functions[node.id]
-
- raise NameNotDefined(node.id, self.expr)
-
- def _eval_subscript(self, node):
- container = self._eval(node.value)
- key = self._eval(node.slice)
- try:
- return container[key]
- except KeyError:
- raise
-
- def _eval_attribute(self, node):
- for prefix in DISALLOW_PREFIXES:
- if node.attr.startswith(prefix):
- raise FeatureNotAvailable(
- "Sorry, access to __attributes "
- " or func_ attributes is not available. "
- "({0})".format(node.attr)
- )
- if node.attr in DISALLOW_METHODS:
- raise FeatureNotAvailable(
- "Sorry, this method is not available. " "({0})".format(node.attr)
- )
- # eval node
- node_evaluated = self._eval(node.value)
-
- # Maybe the base object is an actual object, not just a dict
- try:
- return getattr(node_evaluated, node.attr)
- except (AttributeError, TypeError):
- pass
-
- # TODO: is this a good idea? Try and look for [x] if .x doesn't work?
- if self.ATTR_INDEX_FALLBACK:
- try:
- return node_evaluated[node.attr]
- except (KeyError, TypeError):
- pass
-
- # If it is neither, raise an exception
- raise AttributeDoesNotExist(node.attr, self.expr)
-
- def _eval_index(self, node):
- return self._eval(node.value)
-
- def _eval_slice(self, node):
- lower = upper = step = None
- if node.lower is not None:
- lower = self._eval(node.lower)
- if node.upper is not None:
- upper = self._eval(node.upper)
- if node.step is not None:
- step = self._eval(node.step)
- return slice(lower, upper, step)
-
- def _eval_joinedstr(self, node):
- length = 0
- evaluated_values = []
- for n in node.values:
- val = str(self._eval(n))
- if len(val) + length > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evaluate something this long.")
- evaluated_values.append(val)
- return "".join(evaluated_values)
-
- def _eval_formattedvalue(self, node):
- if node.format_spec:
- fmt = "{:" + self._eval(node.format_spec) + "}"
- return fmt.format(self._eval(node.value))
- return self._eval(node.value)
-
-
-class EvalWithCompoundTypes(SimpleEval):
- """
- SimpleEval with additional Compound Types, and their respective
- function editions. (list, tuple, dict, set).
- """
-
- def __init__(self, operators=None, functions=None, names=None):
- super(EvalWithCompoundTypes, self).__init__(operators, functions, names)
-
- self.functions.update(list=list, tuple=tuple, dict=dict, set=set)
-
- self.nodes.update(
- {
- ast.Dict: self._eval_dict,
- ast.Tuple: self._eval_tuple,
- ast.List: self._eval_list,
- ast.Set: self._eval_set,
- ast.ListComp: self._eval_comprehension,
- ast.GeneratorExp: self._eval_comprehension,
- }
- )
-
- def eval(self, expr):
- self._max_count = 0
- return super(EvalWithCompoundTypes, self).eval(expr)
-
- def _eval_dict(self, node):
- return {self._eval(k): self._eval(v) for (k, v) in zip(node.keys, node.values)}
-
- def _eval_tuple(self, node):
- return tuple(self._eval(x) for x in node.elts)
-
- def _eval_list(self, node):
- return list(self._eval(x) for x in node.elts)
-
- def _eval_set(self, node):
- return set(self._eval(x) for x in node.elts)
-
- def _eval_comprehension(self, node):
- to_return = []
-
- extra_names = {}
-
- previous_name_evaller = self.nodes[ast.Name]
-
- def eval_names_extra(node):
- """
- Here we hide our extra scope for within this comprehension
- """
- if node.id in extra_names:
- return extra_names[node.id]
- return previous_name_evaller(node)
-
- self.nodes.update({ast.Name: eval_names_extra})
-
- def recurse_targets(target, value):
- """
- Recursively (enter, (into, (nested, name), unpacking)) = \
- and, (assign, (values, to), each
- """
- if isinstance(target, ast.Name):
- extra_names[target.id] = value
- else:
- for t, v in zip(target.elts, value):
- recurse_targets(t, v)
-
- def do_generator(gi=0):
- g = node.generators[gi]
- for i in self._eval(g.iter):
- self._max_count += 1
-
- if self._max_count > MAX_COMPREHENSION_LENGTH:
- raise IterableTooLong("Comprehension generates too many elements")
- recurse_targets(g.target, i)
- if all(self._eval(iff) for iff in g.ifs):
- if len(node.generators) > gi + 1:
- do_generator(gi + 1)
- else:
- to_return.append(self._eval(node.elt))
-
- try:
- do_generator()
- finally:
- self.nodes.update({ast.Name: previous_name_evaller})
-
- return to_return
-
-
-def simple_eval(expr, operators=None, functions=None, names=None):
- """ Simply evaluate an expresssion """
- s = SimpleEval(operators=operators, functions=functions, names=names)
- return s.eval(expr)
diff --git a/singer_sdk/helpers/_state.py b/singer_sdk/helpers/_state.py
index 9d01021866..a42a384331 100644
--- a/singer_sdk/helpers/_state.py
+++ b/singer_sdk/helpers/_state.py
@@ -18,7 +18,7 @@
STARTING_MARKER = "starting_replication_value"
-def get_state_if_exists( # noqa: PLR0911
+def get_state_if_exists(
tap_state: dict,
tap_stream_id: str,
state_partition_context: dict | None = None,
@@ -47,9 +47,7 @@ def get_state_if_exists( # noqa: PLR0911
stream_state = tap_state["bookmarks"][tap_stream_id]
if not state_partition_context:
- if key:
- return stream_state.get(key, None)
- return stream_state
+ return stream_state.get(key, None) if key else stream_state
if "partitions" not in stream_state:
return None # No partitions defined
@@ -59,9 +57,7 @@ def get_state_if_exists( # noqa: PLR0911
)
if matched_partition is None:
return None # Partition definition not present
- if key:
- return matched_partition.get(key, None)
- return matched_partition
+ return matched_partition.get(key, None) if key else matched_partition
def get_state_partitions_list(tap_state: dict, tap_stream_id: str) -> list[dict] | None:
@@ -84,10 +80,7 @@ def _find_in_partitions_list(
f"{{state_partition_context}}.\nMatching state values were: {found!s}"
)
raise ValueError(msg)
- if found:
- return t.cast(dict, found[0])
-
- return None
+ return t.cast(dict, found[0]) if found else None
def _create_in_partitions_list(
diff --git a/singer_sdk/helpers/_typing.py b/singer_sdk/helpers/_typing.py
index d3df38a5b7..3a87ab4b92 100644
--- a/singer_sdk/helpers/_typing.py
+++ b/singer_sdk/helpers/_typing.py
@@ -4,21 +4,21 @@
import copy
import datetime
+import logging
import typing as t
from enum import Enum
from functools import lru_cache
import pendulum
-if t.TYPE_CHECKING:
- import logging
-
_MAX_TIMESTAMP = "9999-12-31 23:59:59.999999"
_MAX_TIME = "23:59:59.999999"
JSONSCHEMA_ANNOTATION_SECRET = "secret" # noqa: S105
JSONSCHEMA_ANNOTATION_WRITEONLY = "writeOnly"
UTC = datetime.timezone.utc
+logger = logging.getLogger(__name__)
+
class DatetimeErrorTreatmentEnum(Enum):
"""Enum for treatment options for date parsing error."""
@@ -67,18 +67,19 @@ def append_type(type_dict: dict, new_type: str) -> dict:
result["type"] = [*type_array, new_type]
return result
- msg = (
+ logger.warning(
"Could not append type because the JSON schema for the dictionary "
- f"`{type_dict}` appears to be invalid."
+ "`%s` appears to be invalid.",
+ type_dict,
)
- raise ValueError(msg)
+ return result
def is_secret_type(type_dict: dict) -> bool:
"""Return True if JSON Schema type definition appears to be a secret.
Will return true if either `writeOnly` or `secret` are true on this type
- or any of the type's subproperties.
+ or any of the type's sub-properties.
Args:
type_dict: The JSON Schema type to check.
@@ -95,7 +96,7 @@ def is_secret_type(type_dict: dict) -> bool:
return True
if "properties" in type_dict:
- # Recursively check subproperties and return True if any child is secret.
+ # Recursively check sub-properties and return True if any child is secret.
return any(
is_secret_type(child_type_dict)
for child_type_dict in type_dict["properties"].values()
@@ -387,6 +388,7 @@ def conform_record_data_types(
return rec
+# TODO: This is in dire need of refactoring. It's a mess.
def _conform_record_data_types( # noqa: PLR0912
input_object: dict[str, t.Any],
schema: dict,
@@ -404,7 +406,7 @@ def _conform_record_data_types( # noqa: PLR0912
input_object: A single record
schema: JSON schema the given input_object is expected to meet
level: Specifies how recursive the conformance process should be
- parent: '.' seperated path to this element from the object root (for logging)
+ parent: '.' separated path to this element from the object root (for logging)
"""
output_object: dict[str, t.Any] = {}
unmapped_properties: list[str] = []
diff --git a/singer_sdk/helpers/capabilities.py b/singer_sdk/helpers/capabilities.py
index 55aff8ce3c..690f04db51 100644
--- a/singer_sdk/helpers/capabilities.py
+++ b/singer_sdk/helpers/capabilities.py
@@ -99,6 +99,47 @@
description="The default target database schema name to use for all streams.",
),
).to_dict()
+ADD_RECORD_METADATA_CONFIG = PropertiesList(
+ Property(
+ "add_record_metadata",
+ BooleanType(),
+ description="Add metadata to records.",
+ ),
+).to_dict()
+
+
+class TargetLoadMethods(str, Enum):
+ """Target-specific capabilities."""
+
+ # always write all input records whether the record already exists or not
+ APPEND_ONLY = "append-only"
+
+ # update existing records and insert new records
+ UPSERT = "upsert"
+
+ # delete all existing records and insert all input records
+ OVERWRITE = "overwrite"
+
+
+TARGET_LOAD_METHOD_CONFIG = PropertiesList(
+ Property(
+ "load_method",
+ StringType(),
+ description=(
+ "The method to use when loading data into the destination. "
+ "`append-only` will always write all input records whether that records "
+ "already exists or not. `upsert` will update existing records and insert "
+ "new records. `overwrite` will delete all existing records and insert all "
+ "input records."
+ ),
+ allowed_values=[
+ TargetLoadMethods.APPEND_ONLY,
+ TargetLoadMethods.UPSERT,
+ TargetLoadMethods.OVERWRITE,
+ ],
+ default=TargetLoadMethods.APPEND_ONLY,
+ ),
+).to_dict()
class DeprecatedEnum(Enum):
diff --git a/singer_sdk/helpers/jsonpath.py b/singer_sdk/helpers/jsonpath.py
index 9e2956f195..82c514b41d 100644
--- a/singer_sdk/helpers/jsonpath.py
+++ b/singer_sdk/helpers/jsonpath.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import logging
import typing as t
import memoization
@@ -11,6 +12,9 @@
import jsonpath_ng
+logger = logging.getLogger(__name__)
+
+
def extract_jsonpath(
expression: str,
input: dict | list, # noqa: A002
@@ -27,7 +31,11 @@ def extract_jsonpath(
compiled_jsonpath = _compile_jsonpath(expression)
match: jsonpath_ng.DatumInContext
- for match in compiled_jsonpath.find(input):
+ matches = compiled_jsonpath.find(input)
+
+ logger.info("JSONPath matches: %d", len(matches))
+
+ for match in matches:
yield match.value
diff --git a/singer_sdk/io_base.py b/singer_sdk/io_base.py
index 07da6e63e5..f7c2ed668b 100644
--- a/singer_sdk/io_base.py
+++ b/singer_sdk/io_base.py
@@ -10,7 +10,9 @@
import typing as t
from collections import Counter, defaultdict
-from singer_sdk._singerlib import SingerMessageType
+from singer_sdk._singerlib.messages import Message, SingerMessageType
+from singer_sdk._singerlib.messages import format_message as singer_format_message
+from singer_sdk._singerlib.messages import write_message as singer_write_message
from singer_sdk.helpers._compat import final
logger = logging.getLogger(__name__)
@@ -48,7 +50,7 @@ def _assert_line_requires(line_dict: dict, requires: set[str]) -> None:
if not requires.issubset(line_dict):
missing = requires - set(line_dict)
msg = f"Line is missing required {', '.join(missing)} key(s): {line_dict}"
- raise Exception(msg)
+ raise Exception(msg) # TODO: Raise a more specific exception
def deserialize_json(self, line: str) -> dict:
"""Deserialize a line of json.
@@ -143,3 +145,26 @@ def _process_unknown_message(self, message_dict: dict) -> None:
def _process_endofpipe(self) -> None:
logger.debug("End of pipe reached")
+
+
+class SingerWriter:
+ """Interface for all plugins writting Singer messages to stdout."""
+
+ def format_message(self, message: Message) -> str:
+ """Format a message as a JSON string.
+
+ Args:
+ message: The message to format.
+
+ Returns:
+ The formatted message.
+ """
+ return singer_format_message(message)
+
+ def write_message(self, message: Message) -> None:
+ """Write a message to stdout.
+
+ Args:
+ message: The message to write.
+ """
+ singer_write_message(message)
diff --git a/singer_sdk/mapper.py b/singer_sdk/mapper.py
index 031ca0c826..b48c497270 100644
--- a/singer_sdk/mapper.py
+++ b/singer_sdk/mapper.py
@@ -6,14 +6,17 @@
from __future__ import annotations
import abc
+import ast
import copy
import datetime
import hashlib
import logging
import typing as t
+import simpleeval # type: ignore[import]
+
+import singer_sdk.typing as th
from singer_sdk.exceptions import MapExpressionError, StreamMapConfigError
-from singer_sdk.helpers import _simpleeval as simpleeval
from singer_sdk.helpers._catalog import get_selected_schema
from singer_sdk.helpers._flattening import (
FlatteningOptions,
@@ -21,15 +24,6 @@
flatten_schema,
get_flattening_options,
)
-from singer_sdk.typing import (
- CustomType,
- IntegerType,
- JSONTypeHelper,
- NumberType,
- PropertiesList,
- Property,
- StringType,
-)
if t.TYPE_CHECKING:
import sys
@@ -84,7 +78,7 @@ def __init__(
flattening_options: Flattening options, or None to skip flattening.
"""
self.stream_alias = stream_alias
- self.raw_schema = raw_schema
+ self.raw_schema = copy.deepcopy(raw_schema)
self.raw_key_properties = key_properties
self.transformed_schema = raw_schema
self.transformed_key_properties = key_properties
@@ -267,6 +261,7 @@ def __init__(
self._transform_fn,
self.transformed_schema,
) = self._init_functions_and_schema(stream_map=map_transform)
+ self.expr_evaluator = simpleeval.EvalWithCompoundTypes(functions=self.functions)
def transform(self, record: dict) -> dict | None:
"""Return a transformed record.
@@ -278,10 +273,7 @@ def transform(self, record: dict) -> dict | None:
The transformed record.
"""
transformed_record = self._transform_fn(record)
- if not transformed_record:
- return None
-
- return super().transform(transformed_record)
+ return super().transform(transformed_record) if transformed_record else None
def get_filter_result(self, record: dict) -> bool:
"""Return True to include or False to exclude.
@@ -296,7 +288,7 @@ def get_filter_result(self, record: dict) -> bool:
@property
def functions(self) -> dict[str, t.Callable]:
- """Get availabale transformation functions.
+ """Get available transformation functions.
Returns:
Functions which should be available for expression evaluation.
@@ -304,18 +296,21 @@ def functions(self) -> dict[str, t.Callable]:
funcs: dict[str, t.Any] = simpleeval.DEFAULT_FUNCTIONS.copy()
funcs["md5"] = md5
funcs["datetime"] = datetime
+ funcs["bool"] = bool
return funcs
def _eval(
self,
expr: str,
+ expr_parsed: ast.Expr,
record: dict,
property_name: str | None,
) -> str | int | float:
"""Solve an expression.
Args:
- expr: String expression to evaluate.
+ expr: String expression to evaluate (used to raise human-readable errors).
+ expr_parsed: Parsed expression abstract syntax tree.
record: Individual stream record.
property_name: Name of property to transform in the record.
@@ -333,10 +328,10 @@ def _eval(
# Allow access to original property value if applicable
names["self"] = record[property_name]
try:
- result: str | int | float = simpleeval.simple_eval(
+ self.expr_evaluator.names = names
+ result: str | int | float = self.expr_evaluator.eval(
expr,
- functions=self.functions,
- names=names,
+ previously_parsed=expr_parsed,
)
except (simpleeval.InvalidExpression, SyntaxError) as ex:
msg = f"Failed to evaluate simpleeval expressions {expr}."
@@ -349,8 +344,8 @@ def _eval(
def _eval_type(
self,
expr: str,
- default: JSONTypeHelper | None = None,
- ) -> JSONTypeHelper:
+ default: th.JSONTypeHelper | None = None,
+ ) -> th.JSONTypeHelper:
"""Evaluate an expression's type.
Args:
@@ -367,21 +362,25 @@ def _eval_type(
msg = "Expression should be str, not None"
raise ValueError(msg)
- default = default or StringType()
+ default = default or th.StringType()
+
+ # If a field is set to "record", then it should be an "object" in the schema
+ if expr == "record":
+ return th.CustomType(self.raw_schema)
if expr.startswith("float("):
- return NumberType()
+ return th.NumberType()
if expr.startswith("int("):
- return IntegerType()
+ return th.IntegerType()
if expr.startswith("str("):
- return StringType()
+ return th.StringType()
- if expr[0] == "'" and expr[-1] == "'":
- return StringType()
+ if expr.startswith("bool("):
+ return th.BooleanType()
- return default
+ return th.StringType() if expr[0] == "'" and expr[-1] == "'" else default
def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
self,
@@ -398,6 +397,7 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
Raises:
NotImplementedError: TODO
StreamMapConfigError: TODO
+ MapExpressionError: TODO
"""
stream_map = copy.copy(stream_map)
@@ -405,6 +405,12 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
include_by_default = True
if stream_map and MAPPER_FILTER_OPTION in stream_map:
filter_rule = stream_map.pop(MAPPER_FILTER_OPTION)
+ try:
+ filter_rule_parsed: ast.Expr = ast.parse(filter_rule).body[0] # type: ignore[arg-type,assignment]
+ except (SyntaxError, IndexError) as ex:
+ msg = f"Failed to parse expression {filter_rule}."
+ raise MapExpressionError(msg) from ex
+
logging.info(
"Found '%s' filter rule: %s",
self.stream_alias,
@@ -442,11 +448,12 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
transformed_schema = copy.copy(self.raw_schema)
if not include_by_default:
# Start with only the defined (or transformed) key properties
- transformed_schema = PropertiesList().to_dict()
+ transformed_schema = th.PropertiesList().to_dict()
if "properties" not in transformed_schema:
transformed_schema["properties"] = {}
+ stream_map_parsed: list[tuple[str, str | None, ast.Expr | None]] = []
for prop_key, prop_def in list(stream_map.items()):
if prop_def in {None, NULL_STRING}:
if prop_key in (self.transformed_key_properties or []):
@@ -459,8 +466,9 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
)
raise StreamMapConfigError(msg)
transformed_schema["properties"].pop(prop_key, None)
+ stream_map_parsed.append((prop_key, prop_def, None))
elif isinstance(prop_def, str):
- default_type: JSONTypeHelper = StringType() # Fallback to string
+ default_type: th.JSONTypeHelper = th.StringType() # Fallback to string
existing_schema: dict = (
# Use transformed schema if available
transformed_schema["properties"].get(prop_key, {})
@@ -469,14 +477,21 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
)
if existing_schema:
# Set default type if property exists already in JSON Schema
- default_type = CustomType(existing_schema)
+ default_type = th.CustomType(existing_schema)
transformed_schema["properties"].update(
- Property(
+ th.Property(
prop_key,
self._eval_type(prop_def, default=default_type),
).to_dict(),
)
+ try:
+ parsed_def: ast.Expr = ast.parse(prop_def).body[0] # type: ignore[assignment]
+ stream_map_parsed.append((prop_key, prop_def, parsed_def))
+ except (SyntaxError, IndexError) as ex:
+ msg = f"Failed to parse expression {prop_def}."
+ raise MapExpressionError(msg) from ex
+
else:
msg = (
f"Unexpected type '{type(prop_def).__name__}' in stream map for "
@@ -498,10 +513,14 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
# Declare function variables
- def eval_filter(filter_rule: str) -> t.Callable[[dict], bool]:
+ def eval_filter(
+ filter_rule: str,
+ filter_rule_parsed: ast.Expr,
+ ) -> t.Callable[[dict], bool]:
def _inner(record: dict) -> bool:
filter_result = self._eval(
expr=filter_rule,
+ expr_parsed=filter_rule_parsed,
record=record,
property_name=None,
)
@@ -523,7 +542,7 @@ def always_true(record: dict) -> bool:
return True
if isinstance(filter_rule, str):
- filter_fn = eval_filter(filter_rule)
+ filter_fn = eval_filter(filter_rule, filter_rule_parsed)
elif filter_rule is None:
filter_fn = always_true
else:
@@ -548,16 +567,17 @@ def transform_fn(record: dict) -> dict | None:
if key_property in record:
result[key_property] = record[key_property]
- for prop_key, prop_def in list(stream_map.items()):
+ for prop_key, prop_def, prop_def_parsed in stream_map_parsed:
if prop_def in {None, NULL_STRING}:
# Remove property from result
result.pop(prop_key, None)
continue
- if isinstance(prop_def, str):
+ if isinstance(prop_def_parsed, ast.Expr):
# Apply property transform
result[prop_key] = self._eval(
- expr=prop_def,
+ expr=prop_def, # type: ignore[arg-type]
+ expr_parsed=prop_def_parsed,
record=record,
property_name=prop_key,
)
diff --git a/singer_sdk/mapper_base.py b/singer_sdk/mapper_base.py
index b0be198bdb..2cc943a462 100644
--- a/singer_sdk/mapper_base.py
+++ b/singer_sdk/mapper_base.py
@@ -7,14 +7,16 @@
import click
-import singer_sdk._singerlib as singer
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers.capabilities import CapabilitiesEnum, PluginCapabilities
-from singer_sdk.io_base import SingerReader
+from singer_sdk.io_base import SingerReader, SingerWriter
from singer_sdk.plugin_base import PluginBase
+if t.TYPE_CHECKING:
+ import singer_sdk._singerlib as singer
-class InlineMapper(PluginBase, SingerReader, metaclass=abc.ABCMeta):
+
+class InlineMapper(PluginBase, SingerReader, SingerWriter, metaclass=abc.ABCMeta):
"""Abstract base class for inline mappers."""
@classproperty
@@ -28,10 +30,9 @@ def capabilities(self) -> list[CapabilitiesEnum]:
PluginCapabilities.STREAM_MAPS,
]
- @staticmethod
- def _write_messages(messages: t.Iterable[singer.Message]) -> None:
+ def _write_messages(self, messages: t.Iterable[singer.Message]) -> None:
for message in messages:
- singer.write_message(message)
+ self.write_message(message)
def _process_schema_message(self, message_dict: dict) -> None:
self._write_messages(self.map_schema_message(message_dict))
diff --git a/singer_sdk/metrics.py b/singer_sdk/metrics.py
index 89d51a3381..e4191eadf4 100644
--- a/singer_sdk/metrics.py
+++ b/singer_sdk/metrics.py
@@ -263,10 +263,9 @@ def __exit__(
exc_tb: The exception traceback.
"""
if Tag.STATUS not in self.tags:
- if exc_type is None:
- self.tags[Tag.STATUS] = Status.SUCCEEDED
- else:
- self.tags[Tag.STATUS] = Status.FAILED
+ self.tags[Tag.STATUS] = (
+ Status.SUCCEEDED if exc_type is None else Status.FAILED
+ )
log(self.logger, Point("timer", self.metric, self.elapsed(), self.tags))
def elapsed(self) -> float:
diff --git a/singer_sdk/plugin_base.py b/singer_sdk/plugin_base.py
index d81e8f7c38..b4e82296b5 100644
--- a/singer_sdk/plugin_base.py
+++ b/singer_sdk/plugin_base.py
@@ -6,6 +6,7 @@
import logging
import os
import sys
+import time
import typing as t
from pathlib import Path, PurePath
from types import MappingProxyType
@@ -71,6 +72,43 @@ def __init__(self) -> None:
super().__init__("Mapper not initialized. Please call setup_mapper() first.")
+class SingerCommand(click.Command):
+ """Custom click command class for Singer packages."""
+
+ def __init__(
+ self,
+ *args: t.Any,
+ logger: logging.Logger,
+ **kwargs: t.Any,
+ ) -> None:
+ """Initialize the command.
+
+ Args:
+ *args: Positional `click.Command` arguments.
+ logger: A logger instance.
+ **kwargs: Keyword `click.Command` arguments.
+ """
+ super().__init__(*args, **kwargs)
+ self.logger = logger
+
+ def invoke(self, ctx: click.Context) -> t.Any: # noqa: ANN401
+ """Invoke the command, capturing warnings and logging them.
+
+ Args:
+ ctx: The `click` context.
+
+ Returns:
+ The result of the command invocation.
+ """
+ logging.captureWarnings(capture=True)
+ try:
+ return super().invoke(ctx)
+ except ConfigValidationError as exc:
+ for error in exc.errors:
+ self.logger.error("Config validation error: %s", error)
+ sys.exit(1)
+
+
class PluginBase(metaclass=abc.ABCMeta):
"""Abstract base class for taps."""
@@ -149,11 +187,14 @@ def __init__(
if self._is_secret_config(k):
config_dict[k] = SecretString(v)
self._config = config_dict
+ metrics._setup_logging(self.config)
+ self.metrics_logger = metrics.get_metrics_logger()
+
self._validate_config(raise_errors=validate_config)
self._mapper: PluginMapper | None = None
- metrics._setup_logging(self.config)
- self.metrics_logger = metrics.get_metrics_logger()
+ # Initialization timestamp
+ self.__initialized_at = int(time.time() * 1000)
def setup_mapper(self) -> None:
"""Initialize the plugin mapper for this tap."""
@@ -185,6 +226,15 @@ def mapper(self, mapper: PluginMapper) -> None:
"""
self._mapper = mapper
+ @property
+ def initialized_at(self) -> int:
+ """Start time of the plugin.
+
+ Returns:
+ The start time of the plugin.
+ """
+ return self.__initialized_at
+
@classproperty
def capabilities(self) -> list[CapabilitiesEnum]:
"""Get capabilities.
@@ -338,27 +388,19 @@ def _is_secret_config(config_key: str) -> bool:
"""
return is_common_secret_key(config_key)
- def _validate_config(
- self,
- *,
- raise_errors: bool = True,
- warnings_as_errors: bool = False,
- ) -> tuple[list[str], list[str]]:
+ def _validate_config(self, *, raise_errors: bool = True) -> list[str]:
"""Validate configuration input against the plugin configuration JSON schema.
Args:
raise_errors: Flag to throw an exception if any validation errors are found.
- warnings_as_errors: Flag to throw an exception if any warnings were emitted.
Returns:
- A tuple of configuration validation warnings and errors.
+ A list of validation errors.
Raises:
ConfigValidationError: If raise_errors is True and validation fails.
"""
- warnings: list[str] = []
errors: list[str] = []
- log_fn = self.logger.info
config_jsonschema = self.config_jsonschema
if config_jsonschema:
@@ -376,19 +418,11 @@ def _validate_config(
f"JSONSchema was: {config_jsonschema}"
)
if raise_errors:
- raise ConfigValidationError(summary)
+ raise ConfigValidationError(summary, errors=errors)
- log_fn = self.logger.warning
- else:
- summary = f"Config validation passed with {len(warnings)} warnings."
- for warning in warnings:
- summary += f"\n{warning}"
+ self.logger.warning(summary)
- if warnings_as_errors and raise_errors and warnings:
- msg = f"One or more warnings ocurred during validation: {warnings}"
- raise ConfigValidationError(msg)
- log_fn(summary)
- return warnings, errors
+ return errors
@classmethod
def print_version(
@@ -542,7 +576,7 @@ def get_singer_command(cls: type[PluginBase]) -> click.Command:
Returns:
A callable CLI object.
"""
- return click.Command(
+ return SingerCommand(
name=cls.name,
callback=cls.invoke,
context_settings={"help_option_names": ["--help"]},
@@ -583,6 +617,7 @@ def get_singer_command(cls: type[PluginBase]) -> click.Command:
is_eager=True,
),
],
+ logger=cls.logger,
)
@plugin_cli
diff --git a/singer_sdk/sinks/core.py b/singer_sdk/sinks/core.py
index 9928aa6f29..32d027cfaa 100644
--- a/singer_sdk/sinks/core.py
+++ b/singer_sdk/sinks/core.py
@@ -6,14 +6,14 @@
import copy
import datetime
import json
+import sys
import time
import typing as t
from gzip import GzipFile
from gzip import open as gzip_open
from types import MappingProxyType
-from dateutil import parser
-from jsonschema import Draft7Validator, FormatChecker
+from jsonschema import Draft7Validator
from singer_sdk.exceptions import MissingKeyPropertiesError
from singer_sdk.helpers._batch import (
@@ -32,7 +32,12 @@
if t.TYPE_CHECKING:
from logging import Logger
- from singer_sdk.plugin_base import PluginBase
+ from singer_sdk.target_base import Target
+
+if sys.version_info < (3, 11):
+ from backports.datetime_fromisoformat import MonkeyPatch
+
+ MonkeyPatch.patch_fromisoformat()
JSONSchemaValidator = Draft7Validator
@@ -48,7 +53,7 @@ class Sink(metaclass=abc.ABCMeta):
def __init__(
self,
- target: PluginBase,
+ target: Target,
stream_name: str,
schema: dict,
key_properties: list[str] | None,
@@ -61,7 +66,8 @@ def __init__(
schema: Schema of the stream to sink.
key_properties: Primary key of the stream to sink.
"""
- self.logger = target.logger
+ self.logger = target.logger.getChild(stream_name)
+ self.sync_started_at = target.initialized_at
self._config = dict(target.config)
self._pending_batch: dict | None = None
self.stream_name = stream_name
@@ -89,7 +95,10 @@ def __init__(
self._batch_records_read: int = 0
self._batch_dupe_records_merged: int = 0
- self._validator = Draft7Validator(schema, format_checker=FormatChecker())
+ self._validator = Draft7Validator(
+ schema,
+ format_checker=Draft7Validator.FORMAT_CHECKER,
+ )
def _get_context(self, record: dict) -> dict: # noqa: ARG002
"""Return an empty dictionary by default.
@@ -238,7 +247,7 @@ def _add_sdc_metadata_to_record(
Args:
record: Individual record in the stream.
- message: TODO
+ message: The record message.
context: Stream partition or context dictionary.
"""
record["_sdc_extracted_at"] = message.get("time_extracted")
@@ -246,12 +255,13 @@ def _add_sdc_metadata_to_record(
tz=datetime.timezone.utc,
).isoformat()
record["_sdc_batched_at"] = (
- context.get("batch_start_time", None)
+ context.get("batch_start_time")
or datetime.datetime.now(tz=datetime.timezone.utc)
).isoformat()
record["_sdc_deleted_at"] = record.get("_sdc_deleted_at")
record["_sdc_sequence"] = int(round(time.time() * 1000))
record["_sdc_table_version"] = message.get("version")
+ record["_sdc_sync_started_at"] = self.sync_started_at
def _add_sdc_metadata_to_schema(self) -> None:
"""Add _sdc metadata columns.
@@ -270,7 +280,7 @@ def _add_sdc_metadata_to_schema(self) -> None:
"type": ["null", "string"],
"format": "date-time",
}
- for col in ("_sdc_sequence", "_sdc_table_version"):
+ for col in ("_sdc_sequence", "_sdc_table_version", "_sdc_sync_started_at"):
properties_dict[col] = {"type": ["null", "integer"]}
def _remove_sdc_metadata_from_schema(self) -> None:
@@ -287,6 +297,7 @@ def _remove_sdc_metadata_from_schema(self) -> None:
"_sdc_deleted_at",
"_sdc_sequence",
"_sdc_table_version",
+ "_sdc_sync_started_at",
):
properties_dict.pop(col, None)
@@ -305,6 +316,7 @@ def _remove_sdc_metadata_from_record(self, record: dict) -> None:
record.pop("_sdc_deleted_at", None)
record.pop("_sdc_sequence", None)
record.pop("_sdc_table_version", None)
+ record.pop("_sdc_sync_started_at", None)
# Record validation
@@ -361,14 +373,22 @@ def _parse_timestamps_in_record(
schema: TODO
treatment: TODO
"""
- for key in record:
+ for key, value in record.items():
+ if key not in schema["properties"]:
+ self.logger.warning("No schema for record field '%s'", key)
+ continue
datelike_type = get_datelike_property_type(schema["properties"][key])
if datelike_type:
- date_val = record[key]
+ date_val = value
try:
- if record[key] is not None:
- date_val = parser.parse(date_val)
- except parser.ParserError as ex:
+ if value is not None:
+ if datelike_type == "time":
+ date_val = datetime.time.fromisoformat(date_val)
+ elif datelike_type == "date":
+ date_val = datetime.date.fromisoformat(date_val)
+ else:
+ date_val = datetime.datetime.fromisoformat(date_val)
+ except ValueError as ex:
date_val = handle_invalid_timestamp_in_record(
record,
[key],
diff --git a/singer_sdk/sinks/sql.py b/singer_sdk/sinks/sql.py
index 6b6f8d1212..5c143818c9 100644
--- a/singer_sdk/sinks/sql.py
+++ b/singer_sdk/sinks/sql.py
@@ -20,7 +20,7 @@
if t.TYPE_CHECKING:
from sqlalchemy.sql import Executable
- from singer_sdk.plugin_base import PluginBase
+ from singer_sdk.target_base import Target
class SQLSink(BatchSink):
@@ -32,7 +32,7 @@ class SQLSink(BatchSink):
def __init__(
self,
- target: PluginBase,
+ target: Target,
stream_name: str,
schema: dict,
key_properties: list[str] | None,
@@ -98,13 +98,7 @@ def schema_name(self) -> str | None:
if default_target_schema:
return default_target_schema
- if len(parts) in {2, 3}:
- # Stream name is a two-part or three-part identifier.
- # Use the second-to-last part as the schema name.
- return self.conform_name(parts[-2], "schema")
-
- # Schema name not detected.
- return None
+ return self.conform_name(parts[-2], "schema") if len(parts) in {2, 3} else None
@property
def database_name(self) -> str | None:
@@ -322,15 +316,21 @@ def bulk_insert_records(
if isinstance(insert_sql, str):
insert_sql = sqlalchemy.text(insert_sql)
- conformed_records = (
- [self.conform_record(record) for record in records]
- if isinstance(records, list)
- else (self.conform_record(record) for record in records)
- )
+ conformed_records = [self.conform_record(record) for record in records]
+ property_names = list(self.conform_schema(schema)["properties"].keys())
+
+ # Create new record dicts with missing properties filled in with None
+ new_records = [
+ {name: record.get(name) for name in property_names}
+ for record in conformed_records
+ ]
+
self.logger.info("Inserting with SQL: %s", insert_sql)
+
with self.connector._connect() as conn, conn.begin():
- conn.execute(insert_sql, conformed_records)
- return len(conformed_records) if isinstance(conformed_records, list) else None
+ result = conn.execute(insert_sql, new_records)
+
+ return result.rowcount
def merge_upsert_from_table(
self,
diff --git a/singer_sdk/streams/core.py b/singer_sdk/streams/core.py
index 35bc2e79ef..f306f1ec4f 100644
--- a/singer_sdk/streams/core.py
+++ b/singer_sdk/streams/core.py
@@ -19,6 +19,7 @@
from singer_sdk.exceptions import (
AbortedSyncFailedException,
AbortedSyncPausedException,
+ InvalidReplicationKeyException,
InvalidStreamSortException,
MaxRecordsLimitException,
)
@@ -123,7 +124,7 @@ def __init__(
msg = "Missing argument or class variable 'name'."
raise ValueError(msg)
- self.logger: logging.Logger = tap.logger
+ self.logger: logging.Logger = tap.logger.getChild(self.name)
self.metrics_logger = tap.metrics_logger
self.tap_name: str = tap.name
self._config: dict = dict(tap.config)
@@ -211,13 +212,23 @@ def is_timestamp_replication_key(self) -> bool:
Returns:
True if the stream uses a timestamp-based replication key.
+
+ Raises:
+ InvalidReplicationKeyException: If the schema does not contain the
+ replication key.
"""
if not self.replication_key:
return False
type_dict = self.schema.get("properties", {}).get(self.replication_key)
+ if type_dict is None:
+ msg = f"Field '{self.replication_key}' is not in schema for stream '{self.name}'" # noqa: E501
+ raise InvalidReplicationKeyException(msg)
return is_datetime_type(type_dict)
- def get_starting_replication_key_value(self, context: dict | None) -> t.Any | None:
+ def get_starting_replication_key_value(
+ self,
+ context: dict | None,
+ ) -> t.Any | None: # noqa: ANN401
"""Get starting replication key.
Will return the value of the stream's replication key when `--state` is passed.
@@ -385,7 +396,7 @@ def _write_starting_replication_value(self, context: dict | None) -> None:
def get_replication_key_signpost(
self,
context: dict | None, # noqa: ARG002
- ) -> datetime.datetime | t.Any | None:
+ ) -> datetime.datetime | t.Any | None: # noqa: ANN401
"""Get the replication signpost.
For timestamp-based replication keys, this defaults to `utc_now()`. For
@@ -758,7 +769,7 @@ def _write_state_message(self) -> None:
if (not self._is_state_flushed) and (
self.tap_state != self._last_emitted_state
):
- singer.write_message(singer.StateMessage(value=self.tap_state))
+ self._tap.write_message(singer.StateMessage(value=self.tap_state))
self._last_emitted_state = copy.deepcopy(self.tap_state)
self._is_state_flushed = True
@@ -786,7 +797,7 @@ def _generate_schema_messages(
def _write_schema_message(self) -> None:
"""Write out a SCHEMA message with the stream schema."""
for schema_message in self._generate_schema_messages():
- singer.write_message(schema_message)
+ self._tap.write_message(schema_message)
@property
def mask(self) -> singer.SelectionMask:
@@ -838,7 +849,7 @@ def _write_record_message(self, record: dict) -> None:
record: A single stream record.
"""
for record_message in self._generate_record_messages(record):
- singer.write_message(record_message)
+ self._tap.write_message(record_message)
self._is_state_flushed = False
@@ -853,7 +864,7 @@ def _write_batch_message(
encoding: The encoding to use for the batch.
manifest: A list of filenames for the batch.
"""
- singer.write_message(
+ self._tap.write_message(
SDKBatchMessage(
stream=self.name,
encoding=encoding,
@@ -1255,7 +1266,7 @@ def get_child_context(self, record: dict, context: dict | None) -> dict | None:
Raises:
NotImplementedError: If the stream has children but this method is not
- overriden.
+ overridden.
"""
if context is None:
for child_stream in self.child_streams:
diff --git a/singer_sdk/streams/graphql.py b/singer_sdk/streams/graphql.py
index 01e5d41eec..fde4f99b94 100644
--- a/singer_sdk/streams/graphql.py
+++ b/singer_sdk/streams/graphql.py
@@ -8,8 +8,10 @@
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.streams.rest import RESTStream
+_TToken = t.TypeVar("_TToken")
-class GraphQLStream(RESTStream, metaclass=abc.ABCMeta):
+
+class GraphQLStream(RESTStream, t.Generic[_TToken], metaclass=abc.ABCMeta):
"""Abstract base class for API-type streams.
GraphQL streams inherit from the class `GraphQLStream`, which in turn inherits from
@@ -43,7 +45,7 @@ def query(self) -> str:
def prepare_request_payload(
self,
context: dict | None,
- next_page_token: t.Any | None,
+ next_page_token: _TToken | None,
) -> dict | None:
"""Prepare the data payload for the GraphQL API request.
diff --git a/singer_sdk/streams/rest.py b/singer_sdk/streams/rest.py
index 563956a5ff..378b32c732 100644
--- a/singer_sdk/streams/rest.py
+++ b/singer_sdk/streams/rest.py
@@ -149,7 +149,7 @@ def requests_session(self) -> requests.Session:
def validate_response(self, response: requests.Response) -> None:
"""Validate HTTP response.
- Checks for error status codes and wether they are fatal or retriable.
+ Checks for error status codes and whether they are fatal or retriable.
In case an error is deemed transient and can be safely retried, then this
method should raise an :class:`singer_sdk.exceptions.RetriableAPIError`.
@@ -179,9 +179,7 @@ def validate_response(self, response: requests.Response) -> None:
"""
if (
response.status_code in self.extra_retry_statuses
- or HTTPStatus.INTERNAL_SERVER_ERROR
- <= response.status_code
- <= max(HTTPStatus)
+ or response.status_code >= HTTPStatus.INTERNAL_SERVER_ERROR
):
msg = self.response_error_message(response)
raise RetriableAPIError(msg, response)
diff --git a/singer_sdk/streams/sql.py b/singer_sdk/streams/sql.py
index d5fb52219a..18d2d88627 100644
--- a/singer_sdk/streams/sql.py
+++ b/singer_sdk/streams/sql.py
@@ -210,5 +210,17 @@ def get_records(self, context: dict | None) -> t.Iterable[dict[str, t.Any]]:
continue
yield transformed_record
+ @property
+ def is_sorted(self) -> bool:
+ """Expect stream to be sorted.
+
+ When `True`, incremental streams will attempt to resume if unexpectedly
+ interrupted.
+
+ Returns:
+ `True` if stream is sorted. Defaults to `False`.
+ """
+ return self.replication_key is not None
+
__all__ = ["SQLStream", "SQLConnector"]
diff --git a/singer_sdk/tap_base.py b/singer_sdk/tap_base.py
index 1ba88522f2..ea521009cb 100644
--- a/singer_sdk/tap_base.py
+++ b/singer_sdk/tap_base.py
@@ -11,7 +11,7 @@
import click
-from singer_sdk._singerlib import Catalog, StateMessage, write_message
+from singer_sdk._singerlib import Catalog, StateMessage
from singer_sdk.configuration._dict_config import merge_missing_config_jsonschema
from singer_sdk.exceptions import AbortedSyncFailedException, AbortedSyncPausedException
from singer_sdk.helpers import _state
@@ -25,11 +25,13 @@
PluginCapabilities,
TapCapabilities,
)
+from singer_sdk.io_base import SingerWriter
from singer_sdk.plugin_base import PluginBase
if t.TYPE_CHECKING:
from pathlib import PurePath
+ from singer_sdk.connectors import SQLConnector
from singer_sdk.mapper import PluginMapper
from singer_sdk.streams import SQLStream, Stream
@@ -44,7 +46,7 @@ class CliTestOptionValue(Enum):
Disabled = "disabled"
-class Tap(PluginBase, metaclass=abc.ABCMeta):
+class Tap(PluginBase, SingerWriter, metaclass=abc.ABCMeta):
"""Abstract base class for taps.
The Tap class governs configuration, validation, and stream discovery for tap
@@ -124,10 +126,10 @@ def streams(self) -> dict[str, Stream]:
Returns:
A mapping of names to streams, using discovery or a provided catalog.
"""
- input_catalog = self.input_catalog
-
if self._streams is None:
self._streams = {}
+ input_catalog = self.input_catalog
+
for stream in self.load_streams():
if input_catalog is not None:
stream.apply_catalog(input_catalog)
@@ -411,7 +413,7 @@ def load_state(self, state: dict[str, t.Any]) -> None:
def _reset_state_progress_markers(self) -> None:
"""Clear prior jobs' progress markers at beginning of sync."""
- for _, state in self.state.get("bookmarks", {}).items():
+ for state in self.state.get("bookmarks", {}).values():
_state.reset_state_progress_markers(state)
for partition_state in state.get("partitions", []):
_state.reset_state_progress_markers(partition_state)
@@ -442,7 +444,7 @@ def sync_all(self) -> None:
"""Sync all streams."""
self._reset_state_progress_markers()
self._set_compatible_replication_methods()
- write_message(StateMessage(value=self.state))
+ self.write_message(StateMessage(value=self.state))
stream: Stream
for stream in self.streams.values():
@@ -617,6 +619,8 @@ class SQLTap(Tap):
default_stream_class: type[SQLStream]
dynamic_catalog: bool = True
+ _tap_connector: SQLConnector | None = None
+
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
"""Initialize the SQL tap.
@@ -629,6 +633,19 @@ def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
self._catalog_dict: dict | None = None
super().__init__(*args, **kwargs)
+ @property
+ def tap_connector(self) -> SQLConnector:
+ """The connector object.
+
+ Returns:
+ The connector object.
+ """
+ if self._tap_connector is None:
+ self._tap_connector = self.default_stream_class.connector_class(
+ dict(self.config),
+ )
+ return self._tap_connector
+
@property
def catalog_dict(self) -> dict:
"""Get catalog dictionary.
@@ -642,7 +659,7 @@ def catalog_dict(self) -> dict:
if self.input_catalog:
return self.input_catalog.to_dict()
- connector = self.default_stream_class.connector_class(dict(self.config))
+ connector = self.tap_connector
result: dict[str, list[dict]] = {"streams": []}
result["streams"].extend(connector.discover_catalog_entries())
@@ -656,8 +673,11 @@ def discover_streams(self) -> list[Stream]:
Returns:
List of discovered Stream objects.
"""
- result: list[Stream] = []
- for catalog_entry in self.catalog_dict["streams"]:
- result.append(self.default_stream_class(self, catalog_entry))
-
- return result
+ return [
+ self.default_stream_class(
+ tap=self,
+ catalog_entry=catalog_entry,
+ connector=self.tap_connector,
+ )
+ for catalog_entry in self.catalog_dict["streams"]
+ ]
diff --git a/singer_sdk/target_base.py b/singer_sdk/target_base.py
index d62bbbfd84..c9d5d62efc 100644
--- a/singer_sdk/target_base.py
+++ b/singer_sdk/target_base.py
@@ -17,6 +17,9 @@
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers._compat import final
from singer_sdk.helpers.capabilities import (
+ ADD_RECORD_METADATA_CONFIG,
+ BATCH_CONFIG,
+ TARGET_LOAD_METHOD_CONFIG,
TARGET_SCHEMA_CONFIG,
CapabilitiesEnum,
PluginCapabilities,
@@ -28,8 +31,9 @@
if t.TYPE_CHECKING:
from pathlib import PurePath
+ from singer_sdk.connectors import SQLConnector
from singer_sdk.mapper import PluginMapper
- from singer_sdk.sinks import Sink
+ from singer_sdk.sinks import Sink, SQLSink
_MAX_PARALLELISM = 8
@@ -48,7 +52,7 @@ class Target(PluginBase, SingerReader, metaclass=abc.ABCMeta):
# Default class to use for creating new sink objects.
# Required if `Target.get_sink_class()` is not defined.
- default_sink_class: type[Sink] | None = None
+ default_sink_class: type[Sink]
def __init__(
self,
@@ -366,7 +370,7 @@ def _process_schema_message(self, message_dict: dict) -> None:
stream_name = message_dict["stream"]
schema = message_dict["schema"]
- key_properties = message_dict.get("key_properties", None)
+ key_properties = message_dict.get("key_properties")
do_registration = False
if stream_name not in self.mapper.stream_maps:
do_registration = True
@@ -570,10 +574,60 @@ def get_singer_command(cls: type[Target]) -> click.Command:
return command
+ @classmethod
+ def append_builtin_config(cls: type[Target], config_jsonschema: dict) -> None:
+ """Appends built-in config to `config_jsonschema` if not already set.
+
+ To customize or disable this behavior, developers may either override this class
+ method or override the `capabilities` property to disable any unwanted
+ built-in capabilities.
+
+ For all except very advanced use cases, we recommend leaving these
+ implementations "as-is", since this provides the most choice to users and is
+ the most "future proof" in terms of taking advantage of built-in capabilities
+ which may be added in the future.
+
+ Args:
+ config_jsonschema: The target's config JSON schema, updated in place.
+ """
+
+ def _merge_missing(source_jsonschema: dict, target_jsonschema: dict) -> None:
+ # Append any missing properties in the target with those from source.
+ for k, v in source_jsonschema["properties"].items():
+ if k not in target_jsonschema["properties"]:
+ target_jsonschema["properties"][k] = v
+
+ _merge_missing(ADD_RECORD_METADATA_CONFIG, config_jsonschema)
+ _merge_missing(TARGET_LOAD_METHOD_CONFIG, config_jsonschema)
+
+ capabilities = cls.capabilities
+
+ if PluginCapabilities.BATCH in capabilities:
+ _merge_missing(BATCH_CONFIG, config_jsonschema)
+
+ super().append_builtin_config(config_jsonschema)
+
class SQLTarget(Target):
"""Target implementation for SQL destinations."""
+ _target_connector: SQLConnector | None = None
+
+ default_sink_class: type[SQLSink]
+
+ @property
+ def target_connector(self) -> SQLConnector:
+ """The connector object.
+
+ Returns:
+ The connector object.
+ """
+ if self._target_connector is None:
+ self._target_connector = self.default_sink_class.connector_class(
+ dict(self.config),
+ )
+ return self._target_connector
+
@classproperty
def capabilities(self) -> list[CapabilitiesEnum]:
"""Get target capabilities.
@@ -616,4 +670,113 @@ def _merge_missing(source_jsonschema: dict, target_jsonschema: dict) -> None:
super().append_builtin_config(config_jsonschema)
- pass
+ @final
+ def add_sqlsink(
+ self,
+ stream_name: str,
+ schema: dict,
+ key_properties: list[str] | None = None,
+ ) -> Sink:
+ """Create a sink and register it.
+
+ This method is internal to the SDK and should not need to be overridden.
+
+ Args:
+ stream_name: Name of the stream.
+ schema: Schema of the stream.
+ key_properties: Primary key of the stream.
+
+ Returns:
+ A new sink for the stream.
+ """
+ self.logger.info("Initializing '%s' target sink...", self.name)
+ sink_class = self.get_sink_class(stream_name=stream_name)
+ sink = sink_class(
+ target=self,
+ stream_name=stream_name,
+ schema=schema,
+ key_properties=key_properties,
+ connector=self.target_connector,
+ )
+ sink.setup()
+ self._sinks_active[stream_name] = sink
+
+ return sink
+
+ def get_sink_class(self, stream_name: str) -> type[SQLSink]:
+ """Get sink for a stream.
+
+ Developers can override this method to return a custom Sink type depending
+ on the value of `stream_name`. Optional when `default_sink_class` is set.
+
+ Args:
+ stream_name: Name of the stream.
+
+ Raises:
+ ValueError: If no :class:`singer_sdk.sinks.Sink` class is defined.
+
+ Returns:
+ The sink class to be used with the stream.
+ """
+ if self.default_sink_class:
+ return self.default_sink_class
+
+ msg = (
+ f"No sink class defined for '{stream_name}' and no default sink class "
+ "available."
+ )
+ raise ValueError(msg)
+
+ def get_sink(
+ self,
+ stream_name: str,
+ *,
+ record: dict | None = None,
+ schema: dict | None = None,
+ key_properties: list[str] | None = None,
+ ) -> Sink:
+ """Return a sink for the given stream name.
+
+ A new sink will be created if `schema` is provided and if either `schema` or
+ `key_properties` has changed. If so, the old sink becomes archived and held
+ until the next drain_all() operation.
+
+ Developers only need to override this method if they want to provide a different
+ sink depending on the values within the `record` object. Otherwise, please see
+ `default_sink_class` property and/or the `get_sink_class()` method.
+
+ Raises :class:`singer_sdk.exceptions.RecordsWithoutSchemaException` if sink does
+ not exist and schema is not sent.
+
+ Args:
+ stream_name: Name of the stream.
+ record: Record being processed.
+ schema: Stream schema.
+ key_properties: Primary key of the stream.
+
+ Returns:
+ The sink used for this target.
+ """
+ _ = record # Custom implementations may use record in sink selection.
+ if schema is None:
+ self._assert_sink_exists(stream_name)
+ return self._sinks_active[stream_name]
+
+ existing_sink = self._sinks_active.get(stream_name, None)
+ if not existing_sink:
+ return self.add_sqlsink(stream_name, schema, key_properties)
+
+ if (
+ existing_sink.schema != schema
+ or existing_sink.key_properties != key_properties
+ ):
+ self.logger.info(
+ "Schema or key properties for '%s' stream have changed. "
+ "Initializing a new '%s' sink...",
+ stream_name,
+ stream_name,
+ )
+ self._sinks_to_clear.append(self._sinks_active.pop(stream_name))
+ return self.add_sqlsink(stream_name, schema, key_properties)
+
+ return existing_sink
diff --git a/singer_sdk/testing/__init__.py b/singer_sdk/testing/__init__.py
index 24ce4ac9f6..83ca9aacca 100644
--- a/singer_sdk/testing/__init__.py
+++ b/singer_sdk/testing/__init__.py
@@ -2,13 +2,14 @@
from __future__ import annotations
+import typing as t
+import warnings
+
from .config import SuiteConfig
from .factory import get_tap_test_class, get_target_test_class
from .legacy import (
_get_tap_catalog,
_select_all,
- get_standard_tap_tests,
- get_standard_target_tests,
sync_end_to_end,
tap_sync_test,
tap_to_target_sync_test,
@@ -16,13 +17,42 @@
)
from .runners import SingerTestRunner, TapTestRunner, TargetTestRunner
+
+def __getattr__(name: str) -> t.Any: # noqa: ANN401
+ if name == "get_standard_tap_tests":
+ warnings.warn(
+ "The function singer_sdk.testing.get_standard_tap_tests is deprecated "
+ "and will be removed in a future release. Use get_tap_test_class instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ from .legacy import get_standard_tap_tests
+
+ return get_standard_tap_tests
+
+ if name == "get_standard_target_tests":
+ warnings.warn(
+ "The function singer_sdk.testing.get_standard_target_tests is deprecated "
+ "and will be removed in a future release. Use get_target_test_class "
+ "instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ from .legacy import get_standard_target_tests
+
+ return get_standard_target_tests
+
+ msg = f"module {__name__} has no attribute {name}"
+ raise AttributeError(msg)
+
+
__all__ = [
"get_tap_test_class",
"get_target_test_class",
"_get_tap_catalog",
"_select_all",
- "get_standard_tap_tests",
- "get_standard_target_tests",
"sync_end_to_end",
"tap_sync_test",
"tap_to_target_sync_test",
diff --git a/singer_sdk/testing/factory.py b/singer_sdk/testing/factory.py
index c7611955fa..30740b6ef4 100644
--- a/singer_sdk/testing/factory.py
+++ b/singer_sdk/testing/factory.py
@@ -8,6 +8,7 @@
from .config import SuiteConfig
from .runners import TapTestRunner, TargetTestRunner
from .suites import (
+ TestSuite,
tap_stream_attribute_tests,
tap_stream_tests,
tap_tests,
@@ -15,14 +16,31 @@
)
if t.TYPE_CHECKING:
- from singer_sdk import Tap, Target
+ from singer_sdk import Stream, Tap, Target
+ from singer_sdk.testing.templates import (
+ AttributeTestTemplate,
+ StreamTestTemplate,
+ TapTestTemplate,
+ )
class BaseTestClass:
"""Base test class."""
- params: t.ClassVar[dict] = {}
- param_ids: t.ClassVar[dict] = {}
+ params: dict[str, t.Any]
+ param_ids: dict[str, list[str]]
+
+ def __init_subclass__(cls, **kwargs: t.Any) -> None:
+ """Initialize a subclass.
+
+ Args:
+ **kwargs: Keyword arguments.
+ """
+ # Add empty params and param_ids attributes to a direct subclass but not to
+ # subclasses of subclasses
+ if cls.__base__ == BaseTestClass:
+ cls.params = {}
+ cls.param_ids = {}
class TapTestClassFactory:
@@ -132,7 +150,7 @@ def runner(self) -> TapTestRunner | TargetTestRunner:
return TapTestClass
- def _annotate_test_class( # noqa: C901
+ def _annotate_test_class(
self,
empty_test_class: type[BaseTestClass],
test_suites: list,
@@ -150,81 +168,101 @@ def _annotate_test_class( # noqa: C901
"""
for suite in test_suites:
if suite.kind == "tap":
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- setattr(empty_test_class, test_name, test.run)
+ self._with_tap_tests(empty_test_class, suite)
if suite.kind in {"tap_stream", "tap_stream_attribute"}:
streams = list(test_runner.new_tap().streams.values())
if suite.kind == "tap_stream":
- params = [
+ self._with_stream_tests(empty_test_class, suite, streams)
+
+ if suite.kind == "tap_stream_attribute":
+ self._with_stream_attribute_tests(empty_test_class, suite, streams)
+
+ return empty_test_class
+
+ def _with_tap_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[TapTestTemplate],
+ ) -> None:
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ setattr(empty_test_class, test_name, test.run)
+
+ def _with_stream_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[StreamTestTemplate],
+ streams: list[Stream],
+ ) -> None:
+ params = [
+ {
+ "stream": stream,
+ }
+ for stream in streams
+ ]
+ param_ids = [stream.name for stream in streams]
+
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ setattr(
+ empty_test_class,
+ test_name,
+ test.run,
+ )
+ empty_test_class.params[test_name] = params
+ empty_test_class.param_ids[test_name] = param_ids
+
+ def _with_stream_attribute_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[AttributeTestTemplate],
+ streams: list[Stream],
+ ) -> None:
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ test_params = []
+ test_ids: list[str] = []
+ for stream in streams:
+ final_schema = stream.stream_maps[-1].transformed_schema["properties"]
+ test_params.extend(
+ [
{
"stream": stream,
+ "attribute_name": prop_name,
}
- for stream in streams
- ]
- param_ids = [stream.name for stream in streams]
-
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- setattr(
- empty_test_class,
- test_name,
- test.run,
+ for prop_name, prop_schema in final_schema.items()
+ if test_class.evaluate(
+ stream=stream,
+ property_name=prop_name,
+ property_schema=prop_schema,
)
- empty_test_class.params[test_name] = params
- empty_test_class.param_ids[test_name] = param_ids
-
- if suite.kind == "tap_stream_attribute":
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- test_params = []
- test_ids = []
- for stream in streams:
- test_params.extend(
- [
- {
- "stream": stream,
- "attribute_name": property_name,
- }
- for property_name, property_schema in stream.schema[
- "properties"
- ].items()
- if test_class.evaluate(
- stream=stream,
- property_name=property_name,
- property_schema=property_schema,
- )
- ],
- )
- test_ids.extend(
- [
- f"{stream.name}.{property_name}"
- for property_name, property_schema in stream.schema[
- "properties"
- ].items()
- if test_class.evaluate(
- stream=stream,
- property_name=property_name,
- property_schema=property_schema,
- )
- ],
- )
-
- if test_params:
- setattr(
- empty_test_class,
- test_name,
- test.run,
- )
- empty_test_class.params[test_name] = test_params
- empty_test_class.param_ids[test_name] = test_ids
+ ],
+ )
+ test_ids.extend(
+ [
+ f"{stream.name}.{prop_name}"
+ for prop_name, prop_schema in final_schema.items()
+ if test_class.evaluate(
+ stream=stream,
+ property_name=prop_name,
+ property_schema=prop_schema,
+ )
+ ],
+ )
- return empty_test_class
+ if test_params:
+ setattr(
+ empty_test_class,
+ test_name,
+ test.run,
+ )
+ empty_test_class.params[test_name] = test_params
+ empty_test_class.param_ids[test_name] = test_ids
class TargetTestClassFactory:
diff --git a/singer_sdk/testing/legacy.py b/singer_sdk/testing/legacy.py
index 5baa94034c..5329b32244 100644
--- a/singer_sdk/testing/legacy.py
+++ b/singer_sdk/testing/legacy.py
@@ -150,10 +150,7 @@ def _get_tap_catalog(
# Test discovery
tap.run_discovery()
catalog_dict = tap.catalog_dict
- if select_all:
- return _select_all(catalog_dict)
-
- return catalog_dict
+ return _select_all(catalog_dict) if select_all else catalog_dict
def _select_all(catalog_dict: dict) -> dict:
diff --git a/singer_sdk/testing/runners.py b/singer_sdk/testing/runners.py
index 96416de95e..71f2943358 100644
--- a/singer_sdk/testing/runners.py
+++ b/singer_sdk/testing/runners.py
@@ -242,7 +242,9 @@ def target_input(self) -> t.IO[str]:
if self.input_io:
self._input = self.input_io
elif self.input_filepath:
- self._input = Path(self.input_filepath).open(encoding="utf8")
+ self._input = Path(self.input_filepath).open( # noqa: SIM115
+ encoding="utf8",
+ )
return t.cast(t.IO[str], self._input)
@target_input.setter
diff --git a/singer_sdk/testing/suites.py b/singer_sdk/testing/suites.py
index 0f8a9fabe0..df93c86d27 100644
--- a/singer_sdk/testing/suites.py
+++ b/singer_sdk/testing/suites.py
@@ -37,21 +37,22 @@
TargetOptionalAttributes,
TargetRecordBeforeSchemaTest,
TargetRecordMissingKeyProperty,
+ TargetRecordMissingOptionalFields,
TargetSchemaNoProperties,
TargetSchemaUpdates,
TargetSpecialCharsInAttributes,
)
+from .templates import TestTemplate
-if t.TYPE_CHECKING:
- from .templates import TapTestTemplate, TargetTestTemplate, TestTemplate
+T = t.TypeVar("T", bound=TestTemplate)
@dataclass
-class TestSuite:
+class TestSuite(t.Generic[T]):
"""Test Suite container class."""
kind: str
- tests: list[type[TestTemplate] | type[TapTestTemplate] | type[TargetTestTemplate]]
+ tests: list[type[T]]
# Tap Test Suites
@@ -103,6 +104,7 @@ class TestSuite:
TargetOptionalAttributes,
TargetRecordBeforeSchemaTest,
TargetRecordMissingKeyProperty,
+ TargetRecordMissingOptionalFields,
TargetSchemaNoProperties,
TargetSchemaUpdates,
TargetSpecialCharsInAttributes,
diff --git a/singer_sdk/testing/tap_tests.py b/singer_sdk/testing/tap_tests.py
index a95720d571..8008f2ffca 100644
--- a/singer_sdk/testing/tap_tests.py
+++ b/singer_sdk/testing/tap_tests.py
@@ -93,16 +93,20 @@ def test(self) -> None:
class StreamCatalogSchemaMatchesRecordTest(StreamTestTemplate):
"""Test all attributes in the catalog schema are present in the record schema."""
- name = "catalog_schema_matches_record"
+ name = "transformed_catalog_schema_matches_record"
def test(self) -> None:
"""Run test."""
- stream_catalog_keys = set(self.stream.schema["properties"].keys())
+ stream_transformed_keys = set(
+ self.stream.stream_maps[-1].transformed_schema["properties"].keys(),
+ )
stream_record_keys = set().union(*(d.keys() for d in self.stream_records))
- diff = stream_catalog_keys - stream_record_keys
+ diff = stream_transformed_keys - stream_record_keys
if diff:
warnings.warn(
- UserWarning(f"Fields in catalog but not in records: ({diff})"),
+ UserWarning(
+ f"Fields in transformed catalog but not in records: ({diff})",
+ ),
stacklevel=2,
)
@@ -110,14 +114,16 @@ def test(self) -> None:
class StreamRecordSchemaMatchesCatalogTest(StreamTestTemplate):
"""Test all attributes in the record schema are present in the catalog schema."""
- name = "record_schema_matches_catalog"
+ name = "record_schema_matches_transformed_catalog"
def test(self) -> None:
"""Run test."""
- stream_catalog_keys = set(self.stream.schema["properties"].keys())
+ stream_transformed_keys = set(
+ self.stream.stream_maps[-1].transformed_schema["properties"].keys(),
+ )
stream_record_keys = set().union(*(d.keys() for d in self.stream_records))
- diff = stream_record_keys - stream_catalog_keys
- assert not diff, f"Fields in records but not in catalog: ({diff})"
+ diff = stream_record_keys - stream_transformed_keys
+ assert not diff, f"Fields in records but not in transformed catalog: ({diff})"
class StreamRecordMatchesStreamSchema(StreamTestTemplate):
@@ -185,12 +191,12 @@ def test(self) -> None:
Raises:
AssertionError: if value cannot be parsed as a datetime.
"""
- for v in self.non_null_attribute_values:
- try:
+ try:
+ for v in self.non_null_attribute_values:
error_message = f"Unable to parse value ('{v}') with datetime parser."
assert parser.parse(v), error_message
- except parser.ParserError as e:
- raise AssertionError(error_message) from e
+ except parser.ParserError as e:
+ raise AssertionError(error_message) from e
@classmethod
def evaluate(
diff --git a/singer_sdk/testing/target_test_streams/__init__.py b/singer_sdk/testing/target_test_streams/__init__.py
new file mode 100644
index 0000000000..14d313288d
--- /dev/null
+++ b/singer_sdk/testing/target_test_streams/__init__.py
@@ -0,0 +1 @@
+"""Singer output samples, used for testing target behavior."""
diff --git a/singer_sdk/testing/target_test_streams/record_missing_fields.singer b/singer_sdk/testing/target_test_streams/record_missing_fields.singer
new file mode 100644
index 0000000000..a398f6bd66
--- /dev/null
+++ b/singer_sdk/testing/target_test_streams/record_missing_fields.singer
@@ -0,0 +1,4 @@
+{"type": "SCHEMA", "stream": "record_missing_fields", "key_properties": ["id"], "schema": {"type": "object", "properties": {"id": {"type": "integer"}, "optional": {"type": "string"}}, "required": ["id"]}}
+{"type": "RECORD", "stream": "record_missing_fields", "record": {"id": 1, "optional": "now you see me"}}
+{"type": "RECORD", "stream": "record_missing_fields", "record": {"id": 2}}
+{"type": "STATE", "value": {}}
diff --git a/singer_sdk/testing/target_tests.py b/singer_sdk/testing/target_tests.py
index 8412329c57..96e0b0d59c 100644
--- a/singer_sdk/testing/target_tests.py
+++ b/singer_sdk/testing/target_tests.py
@@ -139,3 +139,9 @@ class TargetSpecialCharsInAttributes(TargetFileTestTemplate):
"""Test Target handles special chars in attributes."""
name = "special_chars_in_attributes"
+
+
+class TargetRecordMissingOptionalFields(TargetFileTestTemplate):
+ """Test Target handles record missing optional fields."""
+
+ name = "record_missing_fields"
diff --git a/singer_sdk/testing/templates.py b/singer_sdk/testing/templates.py
index b43d378306..0f01e3f49a 100644
--- a/singer_sdk/testing/templates.py
+++ b/singer_sdk/testing/templates.py
@@ -7,6 +7,9 @@
import warnings
from pathlib import Path
+from singer_sdk.helpers._compat import resources
+from singer_sdk.testing import target_test_streams
+
if t.TYPE_CHECKING:
from singer_sdk.streams import Stream
@@ -334,5 +337,4 @@ def singer_filepath(self) -> Path:
Returns:
The expected Path to this tests singer file.
"""
- current_dir = Path(__file__).resolve().parent
- return current_dir / "target_test_streams" / f"{self.name}.singer"
+ return resources.files(target_test_streams).joinpath(f"{self.name}.singer") # type: ignore[no-any-return]
diff --git a/singer_sdk/typing.py b/singer_sdk/typing.py
index a8f654c904..37c1f40c1f 100644
--- a/singer_sdk/typing.py
+++ b/singer_sdk/typing.py
@@ -58,7 +58,10 @@
import typing as t
import sqlalchemy
-from jsonschema import ValidationError, Validator, validators
+from jsonschema import ValidationError, validators
+
+if t.TYPE_CHECKING:
+ from jsonschema.protocols import Validator
from singer_sdk.helpers._typing import (
JSONSCHEMA_ANNOTATION_SECRET,
@@ -490,6 +493,26 @@ def type_dict(self) -> dict: # type: ignore[override]
return {"type": "array", "items": self.wrapped_type.type_dict, **self.extras}
+class AnyType(JSONTypeHelper):
+ """Any type."""
+
+ def __init__(
+ self,
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+
+ @DefaultInstanceProperty
+ def type_dict(self) -> dict:
+ """Get type dictionary.
+
+ Returns:
+ A dictionary describing the type.
+ """
+ return {**self.extras}
+
+
class Property(JSONTypeHelper[T], t.Generic[T]):
"""Generic Property. Should be nested within a `PropertiesList`."""
@@ -942,12 +965,14 @@ def to_jsonschema_type(
msg = "Expected `str` or a SQLAlchemy `TypeEngine` object or type."
raise ValueError(msg)
- # Look for the type name within the known SQL type names:
- for sqltype, jsonschema_type in sqltype_lookup.items():
- if sqltype.lower() in type_name.lower():
- return jsonschema_type
-
- return sqltype_lookup["string"] # safe failover to str
+ return next(
+ (
+ jsonschema_type
+ for sqltype, jsonschema_type in sqltype_lookup.items()
+ if sqltype.lower() in type_name.lower()
+ ),
+ sqltype_lookup["string"], # safe failover to str
+ )
def _jsonschema_type_check(jsonschema_type: dict, type_check: tuple[str]) -> bool:
@@ -958,23 +983,19 @@ def _jsonschema_type_check(jsonschema_type: dict, type_check: tuple[str]) -> boo
type_check: A tuple of type strings to look for.
Returns:
- True if the schema suports the type.
+ True if the schema supports the type.
"""
if "type" in jsonschema_type:
if isinstance(jsonschema_type["type"], (list, tuple)):
for schema_type in jsonschema_type["type"]:
if schema_type in type_check:
return True
- else:
- if jsonschema_type.get("type") in type_check: # noqa: PLR5501
- return True
+ elif jsonschema_type.get("type") in type_check:
+ return True
- if any(
+ return any(
_jsonschema_type_check(t, type_check) for t in jsonschema_type.get("anyOf", ())
- ):
- return True
-
- return False
+ )
def to_sql_type( # noqa: PLR0911, C901
diff --git a/tests/_singerlib/test_messages.py b/tests/_singerlib/test_messages.py
index 47a36aca68..e10259497c 100644
--- a/tests/_singerlib/test_messages.py
+++ b/tests/_singerlib/test_messages.py
@@ -1,14 +1,16 @@
from __future__ import annotations
+import datetime
import io
from contextlib import redirect_stdout
-from datetime import datetime
import pytest
-from pytz import UTC, timezone
+from pytz import timezone
import singer_sdk._singerlib as singer
-from singer_sdk._singerlib.messages import format_message
+from singer_sdk.io_base import SingerWriter
+
+UTC = datetime.timezone.utc
def test_exclude_null_dict():
@@ -17,22 +19,24 @@ def test_exclude_null_dict():
def test_format_message():
+ singerwriter = SingerWriter()
message = singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
)
- assert format_message(message) == (
+ assert singerwriter.format_message(message) == (
'{"type": "RECORD", "stream": "test", "record": {"id": 1, "name": "test"}}'
)
def test_write_message():
+ singerwriter = SingerWriter()
message = singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
)
with redirect_stdout(io.StringIO()) as out:
- singer.write_message(message)
+ singerwriter.write_message(message)
assert out.getvalue() == (
'{"type": "RECORD", "stream": "test", "record": {"id": 1, "name": "test"}}\n'
@@ -55,19 +59,33 @@ def test_record_message():
assert singer.RecordMessage.from_dict(record.to_dict()) == record
+def test_record_message_parse_time_extracted():
+ message_dic = {
+ "type": "RECORD",
+ "stream": "test",
+ "record": {"id": 1, "name": "test"},
+ "time_extracted": "2021-01-01T00:00:00Z",
+ }
+ record = singer.RecordMessage.from_dict(message_dic)
+ assert record.type == "RECORD"
+ assert record.stream == "test"
+ assert record.record == {"id": 1, "name": "test"}
+ assert record.time_extracted == datetime.datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC)
+
+
def test_record_message_naive_time_extracted():
"""Check that record message' time_extracted must be timezone-aware."""
with pytest.raises(ValueError, match="must be either None or an aware datetime"):
singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
- time_extracted=datetime(2021, 1, 1), # noqa: DTZ001
+ time_extracted=datetime.datetime(2021, 1, 1), # noqa: DTZ001
)
def test_record_message_time_extracted_to_utc():
"""Check that record message's time_extracted is converted to UTC."""
- naive = datetime(2021, 1, 1, 12) # noqa: DTZ001
+ naive = datetime.datetime(2021, 1, 1, 12) # noqa: DTZ001
nairobi = timezone("Africa/Nairobi")
record = singer.RecordMessage(
@@ -75,7 +93,7 @@ def test_record_message_time_extracted_to_utc():
record={"id": 1, "name": "test"},
time_extracted=nairobi.localize(naive),
)
- assert record.time_extracted == datetime(2021, 1, 1, 9, tzinfo=UTC)
+ assert record.time_extracted == datetime.datetime(2021, 1, 1, 9, tzinfo=UTC)
def test_schema_message():
diff --git a/tests/_singerlib/test_schema.py b/tests/_singerlib/test_schema.py
index 07589f4312..4fa72c5a8a 100644
--- a/tests/_singerlib/test_schema.py
+++ b/tests/_singerlib/test_schema.py
@@ -4,10 +4,10 @@
from singer_sdk._singerlib import Schema, resolve_schema_references
-STRING_SCHEMA = Schema(type="string", maxLength=32)
-STRING_DICT = {"type": "string", "maxLength": 32}
-INTEGER_SCHEMA = Schema(type="integer", maximum=1000000)
-INTEGER_DICT = {"type": "integer", "maximum": 1000000}
+STRING_SCHEMA = Schema(type="string", maxLength=32, default="")
+STRING_DICT = {"type": "string", "maxLength": 32, "default": ""}
+INTEGER_SCHEMA = Schema(type="integer", maximum=1000000, default=0)
+INTEGER_DICT = {"type": "integer", "maximum": 1000000, "default": 0}
ARRAY_SCHEMA = Schema(type="array", items=INTEGER_SCHEMA)
ARRAY_DICT = {"type": "array", "items": INTEGER_DICT}
OBJECT_SCHEMA = Schema(
diff --git a/tests/conftest.py b/tests/conftest.py
index cb201c9a12..cb392bd3ab 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -8,14 +8,21 @@
import typing as t
import pytest
+from sqlalchemy import __version__ as sqlalchemy_version
+from singer_sdk import SQLConnector
from singer_sdk import typing as th
-from singer_sdk.sinks import BatchSink
-from singer_sdk.target_base import Target
+from singer_sdk.helpers._typing import DatetimeErrorTreatmentEnum
+from singer_sdk.helpers.capabilities import PluginCapabilities
+from singer_sdk.sinks import BatchSink, SQLSink
+from singer_sdk.target_base import SQLTarget, Target
if t.TYPE_CHECKING:
from _pytest.config import Config
+ from singer_sdk.helpers.capabilities import CapabilitiesEnum
+
+
SYSTEMS = {"linux", "darwin", "windows"}
pytest_plugins = ("singer_sdk.testing.pytest_plugin",)
@@ -39,6 +46,11 @@ def pytest_runtest_setup(item):
pytest.skip(f"cannot run on platform {system}")
+def pytest_report_header() -> list[str]:
+ """Return a list of strings to be displayed in the header of the report."""
+ return [f"sqlalchemy: {sqlalchemy_version}"]
+
+
@pytest.fixture(scope="class")
def outdir() -> t.Generator[str, None, None]:
"""Create a temporary directory for cookiecutters and target output."""
@@ -64,6 +76,7 @@ class BatchSinkMock(BatchSink):
"""A mock Sink class."""
name = "batch-sink-mock"
+ datetime_error_treatment = DatetimeErrorTreatmentEnum.MAX
def __init__(
self,
@@ -97,6 +110,70 @@ class TargetMock(Target):
name = "target-mock"
config_jsonschema = th.PropertiesList().to_dict()
default_sink_class = BatchSinkMock
+ capabilities: t.ClassVar[list[CapabilitiesEnum]] = [
+ *Target.capabilities,
+ PluginCapabilities.BATCH,
+ ]
+
+ def __init__(self, *args, **kwargs):
+ """Create the Mock target sync."""
+ super().__init__(*args, **kwargs)
+ self.state_messages_written: list[dict] = []
+ self.records_written: list[dict] = []
+ self.num_records_processed: int = 0
+ self.num_batches_processed: int = 0
+
+ def _write_state_message(self, state: dict):
+ """Emit the stream's latest state."""
+ super()._write_state_message(state)
+ self.state_messages_written.append(state)
+
+
+class SQLConnectorMock(SQLConnector):
+ """A Mock SQLConnector class."""
+
+
+class SQLSinkMock(SQLSink):
+ """A mock Sink class."""
+
+ name = "sql-sink-mock"
+ connector_class = SQLConnectorMock
+
+ def __init__(
+ self,
+ target: SQLTargetMock,
+ stream_name: str,
+ schema: dict,
+ key_properties: list[str] | None,
+ connector: SQLConnector | None = None,
+ ):
+ """Create the Mock batch-based sink."""
+ self._connector: SQLConnector
+ self._connector = connector or self.connector_class(dict(target.config))
+ super().__init__(target, stream_name, schema, key_properties, connector)
+ self.target = target
+
+ def process_record(self, record: dict, context: dict) -> None:
+ """Tracks the count of processed records."""
+ self.target.num_records_processed += 1
+ super().process_record(record, context)
+
+ def process_batch(self, context: dict) -> None:
+ """Write to mock trackers."""
+ self.target.records_written.extend(context["records"])
+ self.target.num_batches_processed += 1
+
+ @property
+ def key_properties(self) -> list[str]:
+ return [key.upper() for key in super().key_properties]
+
+
+class SQLTargetMock(SQLTarget):
+ """A mock Target class."""
+
+ name = "sql-target-mock"
+ config_jsonschema = th.PropertiesList().to_dict()
+ default_sink_class = SQLSinkMock
def __init__(self, *args, **kwargs):
"""Create the Mock target sync."""
diff --git a/tests/core/conftest.py b/tests/core/conftest.py
new file mode 100644
index 0000000000..06355ccfef
--- /dev/null
+++ b/tests/core/conftest.py
@@ -0,0 +1,101 @@
+"""Tap, target and stream test fixtures."""
+
+from __future__ import annotations
+
+import typing as t
+
+import pendulum
+import pytest
+
+from singer_sdk import Stream, Tap
+from singer_sdk.typing import (
+ DateTimeType,
+ IntegerType,
+ PropertiesList,
+ Property,
+ StringType,
+)
+
+
+class SimpleTestStream(Stream):
+ """Test stream class."""
+
+ name = "test"
+ schema = PropertiesList(
+ Property("id", IntegerType, required=True),
+ Property("value", StringType, required=True),
+ Property("updatedAt", DateTimeType, required=True),
+ ).to_dict()
+ replication_key = "updatedAt"
+
+ def __init__(self, tap: Tap):
+ """Create a new stream."""
+ super().__init__(tap, schema=self.schema, name=self.name)
+
+ def get_records(
+ self,
+ context: dict | None, # noqa: ARG002
+ ) -> t.Iterable[dict[str, t.Any]]:
+ """Generate records."""
+ yield {"id": 1, "value": "Egypt"}
+ yield {"id": 2, "value": "Germany"}
+ yield {"id": 3, "value": "India"}
+
+
+class UnixTimestampIncrementalStream(SimpleTestStream):
+ name = "unix_ts"
+ schema = PropertiesList(
+ Property("id", IntegerType, required=True),
+ Property("value", StringType, required=True),
+ Property("updatedAt", IntegerType, required=True),
+ ).to_dict()
+ replication_key = "updatedAt"
+
+
+class UnixTimestampIncrementalStream2(UnixTimestampIncrementalStream):
+ name = "unix_ts_override"
+
+ def compare_start_date(self, value: str, start_date_value: str) -> str:
+ """Compare a value to a start date value."""
+
+ start_timestamp = pendulum.parse(start_date_value).format("X")
+ return max(value, start_timestamp, key=float)
+
+
+class SimpleTestTap(Tap):
+ """Test tap class."""
+
+ name = "test-tap"
+ config_jsonschema = PropertiesList(
+ Property("username", StringType, required=True),
+ Property("password", StringType, required=True),
+ Property("start_date", DateTimeType),
+ additional_properties=False,
+ ).to_dict()
+
+ def discover_streams(self) -> list[Stream]:
+ """List all streams."""
+ return [
+ SimpleTestStream(self),
+ UnixTimestampIncrementalStream(self),
+ UnixTimestampIncrementalStream2(self),
+ ]
+
+
+@pytest.fixture
+def tap_class():
+ """Return the tap class."""
+ return SimpleTestTap
+
+
+@pytest.fixture
+def tap() -> SimpleTestTap:
+ """Tap instance."""
+ return SimpleTestTap(
+ config={
+ "username": "utest",
+ "password": "ptest",
+ "start_date": "2021-01-01",
+ },
+ parse_env_config=False,
+ )
diff --git a/tests/core/rest/test_backoff.py b/tests/core/rest/test_backoff.py
index fbe391e557..7a2ba39b88 100644
--- a/tests/core/rest/test_backoff.py
+++ b/tests/core/rest/test_backoff.py
@@ -74,6 +74,14 @@ def custom_validation_stream(rest_tap):
match=r"503 Server Error: Service Unavailable for path: /dummy",
),
),
+ (
+ 521, # Cloudflare custom status code higher than max(HTTPStatus)
+ "Web Server Is Down",
+ pytest.raises(
+ RetriableAPIError,
+ match=r"521 Server Error: Web Server Is Down for path: /dummy",
+ ),
+ ),
(
429,
"Too Many Requests",
@@ -84,7 +92,7 @@ def custom_validation_stream(rest_tap):
),
(200, "OK", nullcontext()),
],
- ids=["client-error", "server-error", "rate-limited", "ok"],
+ ids=["client-error", "server-error", "server-error", "rate-limited", "ok"],
)
def test_status_code_api(basic_rest_stream, status_code, reason, expectation):
fake_response = requests.Response()
diff --git a/tests/core/sinks/__init__.py b/tests/core/sinks/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/core/sinks/test_sdc_metadata.py b/tests/core/sinks/test_sdc_metadata.py
new file mode 100644
index 0000000000..dbb9a79765
--- /dev/null
+++ b/tests/core/sinks/test_sdc_metadata.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import datetime
+
+import time_machine
+
+from tests.conftest import BatchSinkMock, TargetMock
+
+
+def test_sdc_metadata():
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ target = TargetMock()
+
+ sink = BatchSinkMock(
+ target,
+ "users",
+ {"type": "object", "properties": {"id": {"type": "integer"}}},
+ ["id"],
+ )
+
+ record_message = {
+ "type": "RECORD",
+ "stream": "users",
+ "record": {"id": 1},
+ "time_extracted": "2021-01-01T00:00:00+00:00",
+ "version": 100,
+ }
+ record = record_message["record"]
+
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, 0, 5, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ sink._add_sdc_metadata_to_record(record, record_message, {})
+
+ assert record == {
+ "id": 1,
+ "_sdc_extracted_at": "2021-01-01T00:00:00+00:00",
+ "_sdc_received_at": "2023-01-01T00:05:00+00:00",
+ "_sdc_batched_at": "2023-01-01T00:05:00+00:00",
+ "_sdc_deleted_at": None,
+ "_sdc_sequence": 1672531500000,
+ "_sdc_table_version": 100,
+ "_sdc_sync_started_at": 1672531200000,
+ }
+
+ sink._add_sdc_metadata_to_schema()
+ assert sink.schema == {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "_sdc_extracted_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_received_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_batched_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_deleted_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_sequence": {"type": ["null", "integer"]},
+ "_sdc_table_version": {"type": ["null", "integer"]},
+ "_sdc_sync_started_at": {"type": ["null", "integer"]},
+ },
+ }
diff --git a/tests/core/sinks/test_validation.py b/tests/core/sinks/test_validation.py
new file mode 100644
index 0000000000..0672c9f49a
--- /dev/null
+++ b/tests/core/sinks/test_validation.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+import datetime
+import itertools
+
+import pytest
+
+from tests.conftest import BatchSinkMock, TargetMock
+
+
+def test_validate_record():
+ target = TargetMock()
+ sink = BatchSinkMock(
+ target,
+ "users",
+ {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "created_at": {"type": "string", "format": "date-time"},
+ "created_at_date": {"type": "string", "format": "date"},
+ "created_at_time": {"type": "string", "format": "time"},
+ "invalid_datetime": {"type": "string", "format": "date-time"},
+ },
+ },
+ ["id"],
+ )
+
+ record = {
+ "id": 1,
+ "created_at": "2021-01-01T00:00:00+00:00",
+ "created_at_date": "2021-01-01",
+ "created_at_time": "00:01:00+00:00",
+ "missing_datetime": "2021-01-01T00:00:00+00:00",
+ "invalid_datetime": "not a datetime",
+ }
+ updated_record = sink._validate_and_parse(record)
+
+ assert updated_record["created_at"] == datetime.datetime(
+ 2021,
+ 1,
+ 1,
+ 0,
+ 0,
+ tzinfo=datetime.timezone.utc,
+ )
+ assert updated_record["created_at_date"] == datetime.date(
+ 2021,
+ 1,
+ 1,
+ )
+ assert updated_record["created_at_time"] == datetime.time(
+ 0,
+ 1,
+ tzinfo=datetime.timezone.utc,
+ )
+ assert updated_record["missing_datetime"] == "2021-01-01T00:00:00+00:00"
+ assert updated_record["invalid_datetime"] == "9999-12-31 23:59:59.999999"
+
+
+@pytest.fixture
+def bench_sink() -> BatchSinkMock:
+ target = TargetMock()
+ return BatchSinkMock(
+ target,
+ "users",
+ {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "created_at": {"type": "string", "format": "date-time"},
+ "updated_at": {"type": "string", "format": "date-time"},
+ "deleted_at": {"type": "string", "format": "date-time"},
+ },
+ },
+ ["id"],
+ )
+
+
+@pytest.fixture
+def bench_record():
+ return {
+ "id": 1,
+ "created_at": "2021-01-01T00:08:00-07:00",
+ "updated_at": "2022-01-02T00:09:00-07:00",
+ "deleted_at": "2023-01-03T00:10:00.0000",
+ }
+
+
+def test_bench_parse_timestamps_in_record(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink method _parse_timestamps_in_record."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_parse_timestamps_in_record():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ _ = sink._parse_timestamps_in_record(
+ record.copy(), sink.schema, sink.datetime_error_treatment
+ )
+
+ benchmark(run_parse_timestamps_in_record)
+
+
+def test_bench_validate_and_parse(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink method _validate_and_parse."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_validate_and_parse():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ _ = sink._validate_and_parse(record.copy())
+
+ benchmark(run_validate_and_parse)
+
+
+def test_bench_validate_record_with_schema(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink._validator method validate."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_validate_record_with_schema():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ sink._validator.validate(record)
+
+ benchmark(run_validate_record_with_schema)
diff --git a/tests/core/test_connector_sql.py b/tests/core/test_connector_sql.py
index 1c04dbcdd6..c07188ff79 100644
--- a/tests/core/test_connector_sql.py
+++ b/tests/core/test_connector_sql.py
@@ -1,14 +1,19 @@
from __future__ import annotations
+import typing as t
+from decimal import Decimal
from unittest import mock
import pytest
import sqlalchemy
-from sqlalchemy.dialects import sqlite
+from sqlalchemy.dialects import registry, sqlite
from singer_sdk.connectors import SQLConnector
from singer_sdk.exceptions import ConfigValidationError
+if t.TYPE_CHECKING:
+ from sqlalchemy.engine import Engine
+
def stringify(in_dict):
return {k: str(v) for k, v in in_dict.items()}
@@ -258,3 +263,113 @@ def test_merge_generic_sql_types(
):
merged_type = connector.merge_sql_types(types)
assert isinstance(merged_type, expected_type)
+
+ def test_engine_json_serialization(self, connector: SQLConnector):
+ engine = connector._engine
+ meta = sqlalchemy.MetaData()
+ table = sqlalchemy.Table(
+ "test_table",
+ meta,
+ sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
+ sqlalchemy.Column("attrs", sqlalchemy.JSON),
+ )
+ meta.create_all(engine)
+ with engine.connect() as conn:
+ conn.execute(
+ table.insert(),
+ [
+ {"attrs": {"x": Decimal("1.0")}},
+ {"attrs": {"x": Decimal("2.0"), "y": [1, 2, 3]}},
+ ],
+ )
+ result = conn.execute(table.select())
+ assert result.fetchall() == [
+ (1, {"x": Decimal("1.0")}),
+ (2, {"x": Decimal("2.0"), "y": [1, 2, 3]}),
+ ]
+
+
+class DuckDBConnector(SQLConnector):
+ allow_column_alter = True
+
+ @staticmethod
+ def get_column_alter_ddl(
+ table_name: str,
+ column_name: str,
+ column_type: sqlalchemy.types.TypeEngine,
+ ) -> sqlalchemy.DDL:
+ return sqlalchemy.DDL(
+ "ALTER TABLE %(table_name)s ALTER COLUMN %(column_name)s TYPE %(column_type)s", # noqa: E501
+ {
+ "table_name": table_name,
+ "column_name": column_name,
+ "column_type": column_type,
+ },
+ )
+
+
+class TestDuckDBConnector:
+ @pytest.fixture
+ def connector(self):
+ return DuckDBConnector(config={"sqlalchemy_url": "duckdb:///"})
+
+ def test_create_schema(self, connector: DuckDBConnector):
+ engine = connector._engine
+ connector.create_schema("test_schema")
+ inspector = sqlalchemy.inspect(engine)
+ assert "test_schema" in inspector.get_schema_names()
+
+ def test_column_rename(self, connector: DuckDBConnector):
+ engine = connector._engine
+ meta = sqlalchemy.MetaData()
+ _ = sqlalchemy.Table(
+ "test_table",
+ meta,
+ sqlalchemy.Column("id", sqlalchemy.Integer),
+ sqlalchemy.Column("old_name", sqlalchemy.String),
+ )
+ meta.create_all(engine)
+
+ connector.rename_column("test_table", "old_name", "new_name")
+
+ with engine.connect() as conn:
+ result = conn.execute(
+ sqlalchemy.text("SELECT * FROM test_table"),
+ )
+ assert result.keys() == ["id", "new_name"]
+
+ def test_adapt_column_type(self, connector: DuckDBConnector):
+ connector.allow_column_alter = True
+ engine = connector._engine
+ meta = sqlalchemy.MetaData()
+ _ = sqlalchemy.Table(
+ "test_table",
+ meta,
+ sqlalchemy.Column("id", sqlalchemy.Integer),
+ sqlalchemy.Column("name", sqlalchemy.Integer),
+ )
+ meta.create_all(engine)
+
+ connector._adapt_column_type("test_table", "name", sqlalchemy.types.String())
+
+ with engine.connect() as conn:
+ result = conn.execute(
+ sqlalchemy.text("SELECT * FROM test_table"),
+ )
+ assert result.keys() == ["id", "name"]
+ assert result.cursor.description[1][1] == "STRING"
+
+
+def test_adapter_without_json_serde():
+ registry.register(
+ "myrdbms",
+ "samples.sample_custom_sql_adapter.connector",
+ "CustomSQLDialect",
+ )
+
+ class CustomConnector(SQLConnector):
+ def create_engine(self) -> Engine:
+ return super().create_engine()
+
+ connector = CustomConnector(config={"sqlalchemy_url": "myrdbms:///"})
+ connector.create_engine()
diff --git a/tests/core/test_io.py b/tests/core/test_io.py
index c8de024472..1c715f8423 100644
--- a/tests/core/test_io.py
+++ b/tests/core/test_io.py
@@ -38,7 +38,7 @@ def _process_state_message(self, message_dict: dict) -> None:
id="unparsable",
),
pytest.param(
- '{"type": "RECORD", "stream": "users", "record": {"id": 1, "value": 1.23}}', # noqa: E501
+ '{"type": "RECORD", "stream": "users", "record": {"id": 1, "value": 1.23}}',
{
"type": "RECORD",
"stream": "users",
diff --git a/tests/core/test_jsonschema_helpers.py b/tests/core/test_jsonschema_helpers.py
index 3e4ba6eca8..8438a6168a 100644
--- a/tests/core/test_jsonschema_helpers.py
+++ b/tests/core/test_jsonschema_helpers.py
@@ -4,6 +4,7 @@
import re
import typing as t
+from logging import WARNING
from textwrap import dedent
import pytest
@@ -26,6 +27,7 @@
)
from singer_sdk.tap_base import Tap
from singer_sdk.typing import (
+ AnyType,
ArrayType,
BooleanType,
CustomType,
@@ -130,6 +132,26 @@ def test_to_json():
)
+def test_any_type(caplog: pytest.LogCaptureFixture):
+ schema = PropertiesList(
+ Property("any_type", AnyType, description="Can be anything"),
+ )
+ with caplog.at_level(WARNING):
+ assert schema.to_dict() == {
+ "type": "object",
+ "properties": {
+ "any_type": {
+ "description": "Can be anything",
+ },
+ },
+ }
+ assert caplog.records[0].levelname == "WARNING"
+ assert caplog.records[0].message == (
+ "Could not append type because the JSON schema for the dictionary `{}` "
+ "appears to be invalid."
+ )
+
+
def test_nested_complex_objects():
test1a = Property(
"Datasets",
@@ -490,7 +512,7 @@ def test_property_creation(
property_dict = property_obj.to_dict()
assert property_dict == expected_jsonschema
for check_fn in TYPE_FN_CHECKS:
- property_name = list(property_dict.keys())[0]
+ property_name = next(iter(property_dict.keys()))
property_node = property_dict[property_name]
if check_fn in type_fn_checks_true:
assert (
diff --git a/tests/core/test_mapper.py b/tests/core/test_mapper.py
index 036d7586ae..10f65cf8ef 100644
--- a/tests/core/test_mapper.py
+++ b/tests/core/test_mapper.py
@@ -3,14 +3,16 @@
from __future__ import annotations
import copy
+import datetime
import io
import json
import logging
import typing as t
from contextlib import redirect_stdout
+from decimal import Decimal
import pytest
-from freezegun import freeze_time
+import time_machine
from singer_sdk._singerlib import Catalog
from singer_sdk.exceptions import MapExpressionError
@@ -19,8 +21,12 @@
from singer_sdk.streams.core import Stream
from singer_sdk.tap_base import Tap
from singer_sdk.typing import (
+ ArrayType,
+ BooleanType,
IntegerType,
+ NumberType,
ObjectType,
+ OneOf,
PropertiesList,
Property,
StringType,
@@ -52,6 +58,18 @@ def sample_catalog_dict() -> dict:
Property("the", StringType),
Property("brown", StringType),
).to_dict()
+ nested_jellybean_schema = PropertiesList(
+ Property("id", IntegerType),
+ Property(
+ "custom_fields",
+ ArrayType(
+ ObjectType(
+ Property("id", IntegerType),
+ Property("value", OneOf(StringType, IntegerType, BooleanType)),
+ ),
+ ),
+ ),
+ ).to_dict()
return {
"streams": [
{
@@ -64,6 +82,11 @@ def sample_catalog_dict() -> dict:
"tap_stream_id": "foobars",
"schema": foobars_schema,
},
+ {
+ "stream": "nested_jellybean",
+ "tap_stream_id": "nested_jellybean",
+ "schema": nested_jellybean_schema,
+ },
],
}
@@ -106,6 +129,24 @@ def sample_stream():
{"the": "quick"},
{"brown": "fox"},
],
+ "nested_jellybean": [
+ {
+ "id": 123,
+ "custom_fields": [
+ {"id": 1, "value": "abc"},
+ {"id": 2, "value": 1212},
+ {"id": 3, "value": None},
+ ],
+ },
+ {
+ "id": 124,
+ "custom_fields": [
+ {"id": 1, "value": "foo"},
+ {"id": 2, "value": 9009},
+ {"id": 3, "value": True},
+ ],
+ },
+ ],
}
@@ -114,6 +155,19 @@ def sample_stream():
@pytest.fixture
def transform_stream_maps():
+ nested_jellybean_custom_field_1 = (
+ 'dict([(x["id"], x["value"]) for x in custom_fields]).get(1)'
+ )
+ nested_jellybean_custom_field_2 = (
+ 'int(dict([(x["id"], x["value"]) for x in custom_fields]).get(2)) '
+ 'if dict([(x["id"], x["value"]) for x in custom_fields]).get(2) '
+ "else None"
+ )
+ nested_jellybean_custom_field_3 = (
+ 'bool(dict([(x["id"], x["value"]) for x in custom_fields]).get(3)) '
+ 'if dict([(x["id"], x["value"]) for x in custom_fields]).get(3) '
+ "else None"
+ )
return {
"repositories": {
"repo_name": "_['name']",
@@ -125,6 +179,12 @@ def transform_stream_maps():
"int_test": "int('0')",
"__else__": None,
},
+ "nested_jellybean": {
+ "custom_fields": "__NULL__",
+ "custom_field_1": nested_jellybean_custom_field_1,
+ "custom_field_2": nested_jellybean_custom_field_2,
+ "custom_field_3": nested_jellybean_custom_field_3,
+ },
}
@@ -181,6 +241,20 @@ def transformed_result(stream_map_config):
{"the": "quick"},
{"brown": "fox"},
],
+ "nested_jellybean": [
+ {
+ "id": 123,
+ "custom_field_1": "abc",
+ "custom_field_2": 1212,
+ "custom_field_3": None,
+ },
+ {
+ "id": 124,
+ "custom_field_1": "foo",
+ "custom_field_2": 9009,
+ "custom_field_3": True,
+ },
+ ],
}
@@ -200,6 +274,12 @@ def transformed_schemas():
Property("the", StringType),
Property("brown", StringType),
).to_dict(),
+ "nested_jellybean": PropertiesList(
+ Property("id", IntegerType),
+ Property("custom_field_1", StringType),
+ Property("custom_field_2", IntegerType),
+ Property("custom_field_3", BooleanType),
+ ).to_dict(),
}
@@ -354,17 +434,15 @@ def test_filter_transforms_w_error(
)
-def _test_transform(
- test_name: str,
+def _run_transform(
*,
stream_maps,
stream_map_config,
- expected_result,
- expected_schemas,
sample_stream,
sample_catalog_obj,
):
output: dict[str, list[dict]] = {}
+ output_schemas = {}
mapper = PluginMapper(
plugin_config={
"stream_maps": stream_maps,
@@ -379,15 +457,7 @@ def _test_transform(
if isinstance(stream_map, RemoveRecordTransform):
logging.info("Skipping ignored stream '%s'", stream_name)
continue
-
- assert (
- expected_schemas[stream_map.stream_alias]
- == stream_map.transformed_schema
- ), (
- f"Failed '{test_name}' schema test. Generated schema was "
- f"{json.dumps(stream_map.transformed_schema, indent=2)}"
- )
-
+ output_schemas[stream_map.stream_alias] = stream_map.transformed_schema
output[stream_map.stream_alias] = []
for record in stream:
result = stream_map.transform(record)
@@ -396,6 +466,39 @@ def _test_transform(
continue
output[stream_map.stream_alias].append(result)
+ return output, output_schemas
+
+
+def _test_transform(
+ test_name: str,
+ *,
+ stream_maps,
+ stream_map_config,
+ expected_result,
+ expected_schemas,
+ sample_stream,
+ sample_catalog_obj,
+):
+ output, output_schemas = _run_transform(
+ stream_maps=stream_maps,
+ stream_map_config=stream_map_config,
+ sample_stream=sample_stream,
+ sample_catalog_obj=sample_catalog_obj,
+ )
+
+ assert set(expected_schemas.keys()) == set(output_schemas.keys()), (
+ f"Failed `{test_name}` schema test. "
+ f"'{set(expected_schemas.keys()) - set(output_schemas.keys())}' "
+ "schemas not found. "
+ f"'{set(output_schemas.keys()) - set(expected_schemas.keys())}' "
+ "schemas not expected. "
+ )
+ for expected_schema_name, expected_schema in expected_schemas.items():
+ output_schema = output_schemas[expected_schema_name]
+ assert expected_schema == output_schema, (
+ f"Failed '{test_name}' schema test. Generated schema was "
+ f"{json.dumps(output_schema, indent=2)}"
+ )
assert expected_result == output, (
f"Failed '{test_name}' record result test. "
@@ -415,6 +518,7 @@ class MappedStream(Stream):
ObjectType(
Property("id", IntegerType()),
Property("sub", ObjectType(Property("num", IntegerType()))),
+ Property("some_numbers", ArrayType(NumberType())),
),
),
).to_dict()
@@ -423,17 +527,29 @@ def get_records(self, context): # noqa: ARG002
yield {
"email": "alice@example.com",
"count": 21,
- "user": {"id": 1, "sub": {"num": 1}},
+ "user": {
+ "id": 1,
+ "sub": {"num": 1},
+ "some_numbers": [Decimal("3.14"), Decimal("2.718")],
+ },
}
yield {
"email": "bob@example.com",
"count": 13,
- "user": {"id": 2, "sub": {"num": 2}},
+ "user": {
+ "id": 2,
+ "sub": {"num": 2},
+ "some_numbers": [Decimal("10.32"), Decimal("1.618")],
+ },
}
yield {
"email": "charlie@example.com",
"count": 19,
- "user": {"id": 3, "sub": {"num": 3}},
+ "user": {
+ "id": 3,
+ "sub": {"num": 3},
+ "some_numbers": [Decimal("1.414"), Decimal("1.732")],
+ },
}
@@ -454,7 +570,10 @@ def _clear_schema_cache() -> None:
get_selected_schema.cache_clear()
-@freeze_time("2022-01-01T00:00:00Z")
+@time_machine.travel(
+ datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+)
@pytest.mark.snapshot()
@pytest.mark.usefixtures("_clear_schema_cache")
@pytest.mark.parametrize(
@@ -545,6 +664,13 @@ def _clear_schema_cache() -> None:
"aliased_stream.jsonl",
id="aliased_stream",
),
+ pytest.param(
+ {},
+ True,
+ 0,
+ "flatten_depth_0.jsonl",
+ id="flatten_depth_0",
+ ),
pytest.param(
{},
True,
@@ -601,6 +727,18 @@ def _clear_schema_cache() -> None:
"non_pk_passthrough.jsonl",
id="non_pk_passthrough",
),
+ pytest.param(
+ {
+ "mystream": {
+ "_data": "record",
+ "__else__": None,
+ },
+ },
+ False,
+ 0,
+ "record_to_column.jsonl",
+ id="record_to_column",
+ ),
],
)
def test_mapped_stream(
@@ -626,3 +764,37 @@ def test_mapped_stream(
buf.seek(0)
snapshot.assert_match(buf.read(), snapshot_name)
+
+
+def test_bench_simple_map_transforms(
+ benchmark,
+ sample_stream,
+ sample_catalog_dict,
+ transform_stream_maps,
+ stream_map_config,
+):
+ """Run benchmark tests using the "repositories" stream."""
+ stream_size_scale = 1000
+
+ repositories_catalog = {
+ "streams": [
+ x
+ for x in sample_catalog_dict["streams"]
+ if x["tap_stream_id"] == "repositories"
+ ],
+ }
+
+ repositories_sample_stream = {
+ "repositories": sample_stream["repositories"] * stream_size_scale,
+ }
+ repositories_transform_stream_maps = {
+ "repositories": transform_stream_maps["repositories"],
+ }
+ repositories_sample_catalog_obj = Catalog.from_dict(repositories_catalog)
+ benchmark(
+ _run_transform,
+ stream_maps=repositories_transform_stream_maps,
+ stream_map_config=stream_map_config,
+ sample_stream=repositories_sample_stream,
+ sample_catalog_obj=repositories_sample_catalog_obj,
+ )
diff --git a/tests/core/test_mapper_class.py b/tests/core/test_mapper_class.py
new file mode 100644
index 0000000000..0f0c1192a7
--- /dev/null
+++ b/tests/core/test_mapper_class.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import json
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from samples.sample_mapper.mapper import StreamTransform
+from singer_sdk.exceptions import ConfigValidationError
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'stream_maps' is a required property"],
+ id="missing_stream_maps",
+ ),
+ pytest.param(
+ {"stream_maps": {}},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(config_dict: dict, expectation, errors: list[str]):
+ with expectation as exc:
+ StreamTransform(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli_help():
+ """Test the CLI help message."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(StreamTransform.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(StreamTransform.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'stream_maps' is a required property" in result.stderr
diff --git a/tests/core/test_simpleeval.py b/tests/core/test_simpleeval.py
deleted file mode 100644
index d5cacb30f1..0000000000
--- a/tests/core/test_simpleeval.py
+++ /dev/null
@@ -1,1146 +0,0 @@
-"""
-Simpleeval tests originally imported on 2021-09-16 from:
-- https://github.com/danthedeckie/simpleeval
-
-For more information:
-- https://gitlab.com/meltano/sdk/-/issues/213
-
-"""
-from __future__ import annotations
-
-import ast
-import operator
-import os
-
-# flake8: noqa # Ignoring flake errors in imported module
-# pylint: disable=too-many-public-methods, missing-docstring
-import sys
-import unittest
-import warnings
-
-from singer_sdk.helpers import _simpleeval as simpleeval
-from singer_sdk.helpers._simpleeval import (
- AttributeDoesNotExist,
- EvalWithCompoundTypes,
- FeatureNotAvailable,
- FunctionNotDefined,
- InvalidExpression,
- NameNotDefined,
- SimpleEval,
- simple_eval,
-)
-
-
-class DRYTest(unittest.TestCase):
- """Stuff we need to do every test, let's do here instead..
- Don't Repeat Yourself."""
-
- def setUp(self):
- """ initialize a SimpleEval """
- self.s = SimpleEval()
-
- def t(self, expr, shouldbe): # pylint: disable=invalid-name
- """ test an evaluation of an expression against an expected answer """
- return self.assertEqual(self.s.eval(expr), shouldbe)
-
-
-class TestBasic(DRYTest):
- """ Simple expressions. """
-
- def test_maths_with_ints(self):
- """ simple maths expressions """
-
- self.t("21 + 21", 42)
- self.t("6*7", 42)
- self.t("20 + 1 + (10*2) + 1", 42)
- self.t("100/10", 10)
- self.t("12*12", 144)
- self.t("2 ** 10", 1024)
- self.t("100 % 9", 1)
-
- def test_bools_and_or(self):
- self.t('True and ""', "")
- self.t("True and False", False)
- self.t("True or False", True)
- self.t("False or False", False)
- self.t("1 - 1 or 21", 21)
- self.t("1 - 1 and 11", 0)
- self.t("110 == 100 + 10 and True", True)
- self.t("110 != 100 + 10 and True", False)
- self.t("False or 42", 42)
-
- self.t("False or None", None)
- self.t("None or None", None)
-
- self.s.names = {"out": True, "position": 3}
- self.t(
- "(out and position <=6 and -10)"
- " or (out and position > 6 and -5)"
- " or (not out and 15)",
- -10,
- )
-
- def test_not(self):
- self.t("not False", True)
- self.t("not True", False)
- self.t("not 0", True)
- self.t("not 1", False)
-
- def test_maths_with_floats(self):
- self.t("11.02 - 9.1", 1.92)
- self.t("29.1+39", 68.1)
-
- def test_comparisons(self):
- # GT & LT:
- self.t("1 > 0", True)
- self.t("100000 < 28", False)
- self.t("-2 < 11", True)
- self.t("+2 < 5", True)
- self.t("0 == 0", True)
-
- # GtE, LtE
- self.t("-2 <= -2", True)
- self.t("2 >= 2", True)
- self.t("1 >= 12", False)
- self.t("1.09 <= 1967392", True)
-
- self.t("1 < 2 < 3 < 4", 1 < 2 < 3 < 4)
- self.t("1 < 2 > 3 < 4", 1 < 2 > 3 < 4)
-
- self.t("1<2<1+1", 1 < 2 < 1 + 1)
- self.t("1 == 1 == 2", 1 == 1 == 2)
- self.t("1 == 1 < 2", 1 == 1 < 2)
-
- def test_mixed_comparisons(self):
- self.t("1 > 0.999999", True)
- self.t("1 == True", True) # Note ==, not 'is'.
- self.t("0 == False", True) # Note ==, not 'is'.
- self.t("False == False", True)
- self.t("False < True", True)
-
- def test_if_else(self):
- """ x if y else z """
-
- # and test if/else expressions:
- self.t("'a' if 1 == 1 else 'b'", "a")
- self.t("'a' if 1 > 2 else 'b'", "b")
-
- # and more complex expressions:
- self.t("'a' if 4 < 1 else 'b' if 1 == 2 else 'c'", "c")
-
- def test_default_conversions(self):
- """ conversion between types """
-
- self.t('int("20") + int(0.22*100)', 42)
- self.t('float("42")', 42.0)
- self.t('"Test Stuff!" + str(11)', "Test Stuff!11")
-
- def test_slicing(self):
- self.s.operators[ast.Slice] = (
- operator.getslice if hasattr(operator, "getslice") else operator.getitem
- )
- self.t("'hello'[1]", "e")
- self.t("'hello'[:]", "hello")
- self.t("'hello'[:3]", "hel")
- self.t("'hello'[3:]", "lo")
- self.t("'hello'[::2]", "hlo")
- self.t("'hello'[::-1]", "olleh")
- self.t("'hello'[3::]", "lo")
- self.t("'hello'[:3:]", "hel")
- self.t("'hello'[1:3]", "el")
- self.t("'hello'[1:3:]", "el")
- self.t("'hello'[1::2]", "el")
- self.t("'hello'[:1:2]", "h")
- self.t("'hello'[1:3:1]", "el")
- self.t("'hello'[1:3:2]", "e")
-
- with self.assertRaises(IndexError):
- self.t("'hello'[90]", 0)
-
- self.t('"spam" not in "my breakfast"', True)
- self.t('"silly" in "ministry of silly walks"', True)
- self.t('"I" not in "team"', True)
- self.t('"U" in "RUBBISH"', True)
-
- def test_is(self):
- self.t("1 is 1", True)
- self.t("1 is 2", False)
- self.t('1 is "a"', False)
- self.t("1 is None", False)
- self.t("None is None", True)
-
- self.t("1 is not 1", False)
- self.t("1 is not 2", True)
- self.t('1 is not "a"', True)
- self.t("1 is not None", True)
- self.t("None is not None", False)
-
- def test_fstring(self):
- if sys.version_info >= (3, 6, 0):
- self.t('f""', "")
- self.t('f"stuff"', "stuff")
- self.t('f"one is {1} and two is {2}"', "one is 1 and two is 2")
- self.t('f"1+1 is {1+1}"', "1+1 is 2")
- self.t("f\"{'dramatic':!<11}\"", "dramatic!!!")
-
- def test_set_not_allowed(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("{22}", False)
-
-
-class TestFunctions(DRYTest):
- """ Functions for expressions to play with """
-
- def test_load_file(self):
- """ add in a function which loads data from an external file. """
-
- # write to the file:
-
- with open("testfile.txt", "w") as f:
- f.write("42")
-
- # define the function we'll send to the eval'er
-
- def load_file(filename):
- """ load a file and return its contents """
- with open(filename) as f2:
- return f2.read()
-
- # simple load:
-
- self.s.functions = {"read": load_file}
- self.t("read('testfile.txt')", "42")
-
- # and we should have *replaced* the default functions. Let's check:
-
- with self.assertRaises(simpleeval.FunctionNotDefined):
- self.t("int(read('testfile.txt'))", 42)
-
- # OK, so we can load in the default functions as well...
-
- self.s.functions.update(simpleeval.DEFAULT_FUNCTIONS)
-
- # now it works:
-
- self.t("int(read('testfile.txt'))", 42)
-
- os.remove("testfile.txt")
-
- def test_randoms(self):
- """ test the rand() and randint() functions """
-
- i = self.s.eval("randint(1000)")
- self.assertEqual(type(i), int)
- self.assertLessEqual(i, 1000)
-
- f = self.s.eval("rand()")
- self.assertEqual(type(f), float)
-
- self.t("randint(20)<20", True)
- self.t("rand()<1.0", True)
-
- # I don't know how to further test these functions. Ideas?
-
- def test_methods(self):
- self.t('"WORD".lower()', "word")
- x = simpleeval.DISALLOW_METHODS
- simpleeval.DISALLOW_METHODS = []
- self.t('"{}:{}".format(1, 2)', "1:2")
- simpleeval.DISALLOW_METHODS = x
-
- def test_function_args_none(self):
- def foo():
- return 42
-
- self.s.functions["foo"] = foo
- self.t("foo()", 42)
-
- def test_function_args_required(self):
- def foo(toret):
- return toret
-
- self.s.functions["foo"] = foo
- with self.assertRaises(TypeError):
- self.t("foo()", 42)
-
- self.t("foo(12)", 12)
- self.t("foo(toret=100)", 100)
-
- def test_function_args_defaults(self):
- def foo(toret=9999):
- return toret
-
- self.s.functions["foo"] = foo
- self.t("foo()", 9999)
-
- self.t("foo(12)", 12)
- self.t("foo(toret=100)", 100)
-
- def test_function_args_bothtypes(self):
- def foo(mult, toret=100):
- return toret * mult
-
- self.s.functions["foo"] = foo
- with self.assertRaises(TypeError):
- self.t("foo()", 9999)
-
- self.t("foo(2)", 200)
-
- with self.assertRaises(TypeError):
- self.t("foo(toret=100)", 100)
-
- self.t("foo(4, toret=4)", 16)
- self.t("foo(mult=2, toret=4)", 8)
- self.t("foo(2, 10)", 20)
-
-
-class TestOperators(DRYTest):
- """ Test adding in new operators, removing them, make sure it works. """
-
- # TODO
- pass
-
-
-class TestNewFeatures(DRYTest):
- """ Tests which will break when new features are added..."""
-
- def test_lambda(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("lambda x:22", None)
-
- def test_lambda_application(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("(lambda x:22)(44)", None)
-
-
-class TestTryingToBreakOut(DRYTest):
- """ Test various weird methods to break the security sandbox... """
-
- def test_import(self):
- """ usual suspect. import """
- # cannot import things:
- with self.assertRaises(FeatureNotAvailable):
- self.t("import sys", None)
-
- def test_long_running(self):
- """ exponent operations can take a long time. """
- old_max = simpleeval.MAX_POWER
-
- self.t("9**9**5", 9 ** 9 ** 5)
-
- with self.assertRaises(simpleeval.NumberTooHigh):
- self.t("9**9**8", 0)
-
- # and does limiting work?
-
- simpleeval.MAX_POWER = 100
-
- with self.assertRaises(simpleeval.NumberTooHigh):
- self.t("101**2", 0)
-
- # good, so set it back:
-
- simpleeval.MAX_POWER = old_max
-
- def test_encode_bignums(self):
- # thanks gk
- if hasattr(1, "from_bytes"): # python3 only
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t(
- '(1).from_bytes(("123123123123123123123123").encode()*999999, "big")',
- 0,
- )
-
- def test_string_length(self):
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("50000*'text'", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'text'*50000", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('text'*50000)*1000", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("(50000*'text')*1000", 0)
-
- self.t("'stuff'*20000", 20000 * "stuff")
-
- self.t("20000*'stuff'", 20000 * "stuff")
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('stuff'*20000) + ('stuff'*20000) ", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'stuff'*100000", 100000 * "stuff")
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'" + (10000 * "stuff") + "'*100", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'" + (50000 * "stuff") + "'", 0)
-
- if sys.version_info >= (3, 6, 0):
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("f'{\"foo\"*50000}'", 0)
-
- def test_bytes_array_test(self):
- self.t(
- "'20000000000000000000'.encode() * 5000",
- "20000000000000000000".encode() * 5000,
- )
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'123121323123131231223'.encode() * 5000", 20)
-
- def test_list_length_test(self):
- self.t("'spam spam spam'.split() * 5000", ["spam", "spam", "spam"] * 5000)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('spam spam spam' * 5000).split() * 5000", None)
-
- def test_python_stuff(self):
- """ other various pythony things. """
- # it only evaluates the first statement:
- self.t("11; x = 21; x + x", 11)
-
- def test_function_globals_breakout(self):
- """ by accessing function.__globals__ or func_... """
- # thanks perkinslr.
-
- self.s.functions["x"] = lambda y: y + y
- self.t("x(100)", 200)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("x.__globals__", None)
-
- class EscapeArtist(object):
- @staticmethod
- def trapdoor():
- return 42
-
- @staticmethod
- def _quasi_private():
- return 84
-
- self.s.names["houdini"] = EscapeArtist()
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini.trapdoor.__globals__", 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini.trapdoor.func_globals", 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini._quasi_private()", 0)
-
- # and test for changing '_' to '__':
-
- dis = simpleeval.DISALLOW_PREFIXES
- simpleeval.DISALLOW_PREFIXES = ["func_"]
-
- self.t("houdini.trapdoor()", 42)
- self.t("houdini._quasi_private()", 84)
-
- # and return things to normal
-
- simpleeval.DISALLOW_PREFIXES = dis
-
- def test_mro_breakout(self):
- class Blah(object):
- x = 42
-
- self.s.names["b"] = Blah
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("b.mro()", None)
-
- def test_builtins_private_access(self):
- # explicit attempt of the exploit from perkinslr
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t(
- "True.__class__.__class__.__base__.__subclasses__()[-1]"
- ".__init__.func_globals['sys'].exit(1)",
- 42,
- )
-
- def test_string_format(self):
- # python has so many ways to break out!
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('"{string.__class__}".format(string="things")', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.s.names["x"] = {"a": 1}
- self.t('"{a.__class__}".format_map(x)', 0)
-
- if sys.version_info >= (3, 6, 0):
- self.s.names["x"] = 42
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{x.__class__}"', 0)
-
- self.s.names["x"] = lambda y: y
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{x.__globals__}"', 0)
-
- class EscapeArtist(object):
- @staticmethod
- def trapdoor():
- return 42
-
- @staticmethod
- def _quasi_private():
- return 84
-
- self.s.names[
- "houdini"
- ] = EscapeArtist() # let's just retest this, but in a f-string
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini.trapdoor.__globals__}"', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini.trapdoor.func_globals}"', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini._quasi_private()}"', 0)
-
- # and test for changing '_' to '__':
-
- dis = simpleeval.DISALLOW_PREFIXES
- simpleeval.DISALLOW_PREFIXES = ["func_"]
-
- self.t('f"{houdini.trapdoor()}"', "42")
- self.t('f"{houdini._quasi_private()}"', "84")
-
- # and return things to normal
-
- simpleeval.DISALLOW_PREFIXES = dis
-
-
-class TestCompoundTypes(DRYTest):
- """ Test the compound-types edition of the library """
-
- def setUp(self):
- self.s = EvalWithCompoundTypes()
-
- def test_dict(self):
- self.t("{}", {})
- self.t('{"foo": "bar"}', {"foo": "bar"})
- self.t('{"foo": "bar"}["foo"]', "bar")
- self.t("dict()", {})
- self.t("dict(a=1)", {"a": 1})
-
- def test_dict_contains(self):
- self.t('{"a":22}["a"]', 22)
- with self.assertRaises(KeyError):
- self.t('{"a":22}["b"]', 22)
-
- self.t('{"a": 24}.get("b", 11)', 11)
- self.t('"a" in {"a": 24}', True)
-
- def test_tuple(self):
- self.t("()", ())
- self.t("(1,)", (1,))
- self.t("(1, 2, 3, 4, 5, 6)", (1, 2, 3, 4, 5, 6))
- self.t("(1, 2) + (3, 4)", (1, 2, 3, 4))
- self.t("(1, 2, 3)[1]", 2)
- self.t("tuple()", ())
- self.t('tuple("foo")', ("f", "o", "o"))
-
- def test_tuple_contains(self):
- self.t('("a","b")[1]', "b")
- with self.assertRaises(IndexError):
- self.t('("a","b")[5]', "b")
- self.t('"a" in ("b","c","a")', True)
-
- def test_list(self):
- self.t("[]", [])
- self.t("[1]", [1])
- self.t("[1, 2, 3, 4, 5]", [1, 2, 3, 4, 5])
- self.t("[1, 2, 3][1]", 2)
- self.t("list()", [])
- self.t('list("foo")', ["f", "o", "o"])
-
- def test_list_contains(self):
- self.t('["a","b"][1]', "b")
- with self.assertRaises(IndexError):
- self.t('("a","b")[5]', "b")
-
- self.t('"b" in ["a","b"]', True)
-
- def test_set(self):
- self.t("{1}", {1})
- self.t("{1, 2, 1, 2, 1, 2, 1}", {1, 2})
- self.t("set()", set())
- self.t('set("foo")', {"f", "o"})
-
- self.t("2 in {1,2,3,4}", True)
- self.t("22 not in {1,2,3,4}", True)
-
- def test_not(self):
- self.t("not []", True)
- self.t("not [0]", False)
- self.t("not {}", True)
- self.t("not {0: 1}", False)
- self.t("not {0}", False)
-
- def test_use_func(self):
- self.s = EvalWithCompoundTypes(functions={"map": map, "str": str})
- self.t("list(map(str, [-1, 0, 1]))", ["-1", "0", "1"])
- with self.assertRaises(NameNotDefined):
- self.s.eval("list(map(bad, [-1, 0, 1]))")
-
- with self.assertRaises(FunctionNotDefined):
- self.s.eval("dir(str)")
- with self.assertRaises(FeatureNotAvailable):
- self.s.eval("str.__dict__")
-
- self.s = EvalWithCompoundTypes(functions={"dir": dir, "str": str})
- self.t("dir(str)", dir(str))
-
-
-class TestComprehensions(DRYTest):
- """ Test the comprehensions support of the compound-types edition of the class. """
-
- def setUp(self):
- self.s = EvalWithCompoundTypes()
-
- def test_basic(self):
- self.t("[a + 1 for a in [1,2,3]]", [2, 3, 4])
-
- def test_with_self_reference(self):
- self.t("[a + a for a in [1,2,3]]", [2, 4, 6])
-
- def test_with_if(self):
- self.t("[a for a in [1,2,3,4,5] if a <= 3]", [1, 2, 3])
-
- def test_with_multiple_if(self):
- self.t("[a for a in [1,2,3,4,5] if a <= 3 and a > 1 ]", [2, 3])
-
- def test_attr_access_fails(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("[a.__class__ for a in [1,2,3]]", None)
-
- def test_unpack(self):
- self.t("[a+b for a,b in ((1,2),(3,4))]", [3, 7])
-
- def test_nested_unpack(self):
- self.t("[a+b+c for a, (b, c) in ((1,(1,1)),(3,(2,2)))]", [3, 7])
-
- def test_other_places(self):
- self.s.functions = {"sum": sum}
- self.t("sum([a+1 for a in [1,2,3,4,5]])", 20)
- self.t("sum(a+1 for a in [1,2,3,4,5])", 20)
-
- def test_external_names_work(self):
- self.s.names = {"x": [22, 102, 12.3]}
- self.t("[a/2 for a in x]", [11.0, 51.0, 6.15])
-
- self.s.names = lambda x: ord(x.id)
- self.t("[a + a for a in [b, c, d]]", [ord(x) * 2 for x in "bcd"])
-
- def test_multiple_generators(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(100) if i > 10 for j in range(i) if j < 20]"
- self.t(s, eval(s))
-
- def test_triple_generators(self):
- self.s.functions = {"range": range}
- s = "[(a,b,c) for a in range(4) for b in range(a) for c in range(b)]"
- self.t(s, eval(s))
-
- def test_too_long_generator(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(1000) if i > 10 for j in range(i) if j < 20]"
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_too_long_generator_2(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(100) if i > 1 for j in range(i+10) if j < 100 for k in range(i*j)]"
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_nesting_generators_to_cheat(self):
- self.s.functions = {"range": range}
- s = "[[[c for c in range(a)] for a in range(b)] for b in range(200)]"
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_no_leaking_names(self):
- # see issue #52, failing list comprehensions could leak locals
- with self.assertRaises(simpleeval.NameNotDefined):
- self.s.eval('[x if x == "2" else y for x in "123"]')
-
- with self.assertRaises(simpleeval.NameNotDefined):
- self.s.eval("x")
-
-
-class TestNames(DRYTest):
- """ 'names', what other languages call variables... """
-
- def test_none(self):
- """ what to do when names isn't defined, or is 'none' """
- with self.assertRaises(NameNotDefined):
- self.t("a == 2", None)
-
- self.s.names["s"] = 21
-
- with self.assertRaises(NameNotDefined):
- with warnings.catch_warnings(record=True) as ws:
- self.t("s += a", 21)
-
- self.s.names = None
-
- with self.assertRaises(InvalidExpression):
- self.t("s", 21)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- with self.assertRaises(AttributeDoesNotExist):
- self.t("a.b.d**2", 42)
-
- def test_dict(self):
- """ using a normal dict for names lookup """
-
- self.s.names = {"a": 42}
- self.t("a + a", 84)
-
- self.s.names["also"] = 100
-
- self.t("a + also - a", 100)
-
- # however, you can't assign to those names:
- with warnings.catch_warnings(record=True) as ws:
- self.t("a = 200", 200)
-
- self.assertEqual(self.s.names["a"], 42)
-
- # or assign to lists
-
- self.s.names["b"] = [0]
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("b[0] = 11", 11)
-
- self.assertEqual(self.s.names["b"], [0])
-
- # but you can get items from a list:
-
- self.s.names["b"] = [6, 7]
-
- self.t("b[0] * b[1]", 42)
-
- # or from a dict
-
- self.s.names["c"] = {"i": 11}
-
- self.t("c['i']", 11)
- self.t("c.get('i')", 11)
- self.t("c.get('j', 11)", 11)
- self.t("c.get('j')", None)
-
- # you still can't assign though:
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("c['b'] = 99", 99)
-
- self.assertFalse("b" in self.s.names["c"])
-
- # and going all 'inception' on it doesn't work either:
-
- self.s.names["c"]["c"] = {"c": 11}
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("c['c']['c'] = 21", 21)
-
- self.assertEqual(self.s.names["c"]["c"]["c"], 11)
-
- def test_dict_attr_access(self):
- # nested dict
-
- self.assertEqual(self.s.ATTR_INDEX_FALLBACK, True)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- self.t("a.b.c*2", 84)
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("a.b.c = 11", 11)
-
- self.assertEqual(self.s.names["a"]["b"]["c"], 42)
-
- # TODO: Wat?
- with warnings.catch_warnings(record=True) as ws:
- self.t("a.d = 11", 11)
-
- with self.assertRaises(KeyError):
- self.assertEqual(self.s.names["a"]["d"], 11)
-
- def test_dict_attr_access_disabled(self):
- # nested dict
-
- self.s.ATTR_INDEX_FALLBACK = False
- self.assertEqual(self.s.ATTR_INDEX_FALLBACK, False)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- with self.assertRaises(simpleeval.AttributeDoesNotExist):
- self.t("a.b.c * 2", 84)
-
- self.t("a['b']['c'] * 2", 84)
-
- self.assertEqual(self.s.names["a"]["b"]["c"], 42)
-
- def test_object(self):
- """ using an object for name lookup """
-
- class TestObject(object):
- @staticmethod
- def method_thing():
- return 42
-
- o = TestObject()
- o.a = 23
- o.b = 42
- o.c = TestObject()
- o.c.d = 9001
-
- self.s.names = {"o": o}
-
- self.t("o", o)
- self.t("o.a", 23)
- self.t("o.b + o.c.d", 9043)
-
- self.t("o.method_thing()", 42)
-
- with self.assertRaises(AttributeDoesNotExist):
- self.t("o.d", None)
-
- def test_func(self):
- """ using a function for 'names lookup' """
-
- def resolver(_):
- """ all names now equal 1024! """
- return 1024
-
- self.s.names = resolver
-
- self.t("a", 1024)
- self.t("a + b - c - d", 0)
-
- # the function can do stuff with the value it's sent:
-
- def my_name(node):
- """ all names equal their textual name, twice. """
- return node.id + node.id
-
- self.s.names = my_name
-
- self.t("a", "aa")
-
- def test_from_doc(self):
- """ the 'name first letter as value' example from the docs """
-
- def name_handler(node):
- """return the alphabet number of the first letter of
- the name's textual name"""
- return ord(node.id[0].lower()) - 96
-
- self.s.names = name_handler
- self.t("a", 1)
- self.t("a + b", 3)
-
-
-class TestWhitespace(DRYTest):
- """ test that incorrect whitespace (preceding/trailing) doesn't matter. """
-
- def test_no_whitespace(self):
- self.t("200 + 200", 400)
-
- def test_trailing(self):
- self.t("200 + 200 ", 400)
-
- def test_preciding_whitespace(self):
- self.t(" 200 + 200", 400)
-
- def test_preceding_tab_whitespace(self):
- self.t("\t200 + 200", 400)
-
- def test_preceding_mixed_whitespace(self):
- self.t(" \t 200 + 200", 400)
-
- def test_both_ends_whitespace(self):
- self.t(" \t 200 + 200 ", 400)
-
-
-class TestSimpleEval(unittest.TestCase):
- """ test the 'simple_eval' wrapper function """
-
- def test_basic_run(self):
- self.assertEqual(simple_eval("6*7"), 42)
-
- def test_default_functions(self):
- self.assertEqual(simple_eval("rand() < 1.0 and rand() > -0.01"), True)
- self.assertEqual(simple_eval("randint(200) < 200 and rand() > 0"), True)
-
-
-class TestMethodChaining(unittest.TestCase):
- def test_chaining_correct(self):
- """
- Contributed by Khalid Grandi (xaled).
- """
-
- class A(object):
- def __init__(self):
- self.a = "0"
-
- def add(self, b):
- self.a += "-add" + str(b)
- return self
-
- def sub(self, b):
- self.a += "-sub" + str(b)
- return self
-
- def tostring(self):
- return str(self.a)
-
- x = A()
- self.assertEqual(
- simple_eval("x.add(1).sub(2).sub(3).tostring()", names={"x": x}),
- "0-add1-sub2-sub3",
- )
-
-
-class TestExtendingClass(unittest.TestCase):
- """
- It should be pretty easy to extend / inherit from the SimpleEval class,
- to further lock things down, or unlock stuff, or whatever.
- """
-
- def test_methods_forbidden(self):
- # Example from README
- class EvalNoMethods(simpleeval.SimpleEval):
- def _eval_call(self, node):
- if isinstance(node.func, ast.Attribute):
- raise simpleeval.FeatureNotAvailable(
- "No methods please, we're British"
- )
- return super(EvalNoMethods, self)._eval_call(node)
-
- e = EvalNoMethods()
-
- self.assertEqual(e.eval('"stuff happens"'), "stuff happens")
- self.assertEqual(e.eval("22 + 20"), 42)
- self.assertEqual(e.eval('int("42")'), 42)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- e.eval('" blah ".strip()')
-
-
-class TestExceptions(unittest.TestCase):
- """
- confirm a few attributes exist properly and haven't been
- eaten by 2to3 or whatever... (see #41)
- """
-
- def test_functionnotdefined(self):
- try:
- raise FunctionNotDefined("foo", "foo in bar")
- except FunctionNotDefined as e:
- assert hasattr(e, "func_name")
- assert getattr(e, "func_name") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
- def test_namenotdefined(self):
- try:
- raise NameNotDefined("foo", "foo in bar")
- except NameNotDefined as e:
- assert hasattr(e, "name")
- assert getattr(e, "name") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
- def test_attributedoesnotexist(self):
- try:
- raise AttributeDoesNotExist("foo", "foo in bar")
- except AttributeDoesNotExist as e:
- assert hasattr(e, "attr")
- assert getattr(e, "attr") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
-
-class TestUnusualComparisons(DRYTest):
- def test_custom_comparison_returner(self):
- class Blah(object):
- def __gt__(self, other):
- return self
-
- b = Blah()
- self.s.names = {"b": b}
- self.t("b > 2", b)
-
- def test_custom_comparison_doesnt_return_boolable(self):
- """
- SqlAlchemy, bless it's cotton socks, returns BinaryExpression objects
- when asking for comparisons between things. These BinaryExpressions
- raise a TypeError if you try and check for Truthyiness.
- """
-
- class BinaryExpression(object):
- def __init__(self, value):
- self.value = value
-
- def __eq__(self, other):
- return self.value == getattr(other, "value", other)
-
- def __repr__(self):
- return "".format(self.value)
-
- def __bool__(self):
- # This is the only important part, to match SqlAlchemy - the rest
- # of the methods are just to make testing a bit easier...
- raise TypeError("Boolean value of this clause is not defined")
-
- class Blah(object):
- def __gt__(self, other):
- return BinaryExpression("GT")
-
- def __lt__(self, other):
- return BinaryExpression("LT")
-
- b = Blah()
- self.s.names = {"b": b}
- # This should not crash:
- e = eval("b > 2", self.s.names)
-
- self.t("b > 2", BinaryExpression("GT"))
- self.t("1 < 5 > b", BinaryExpression("LT"))
-
-
-class TestGetItemUnhappy(DRYTest):
- # Again, SqlAlchemy doing unusual things. Throwing it's own errors, rather than
- # expected types...
-
- def test_getitem_not_implemented(self):
- class Meh(object):
- def __getitem__(self, key):
- raise NotImplementedError("booya!")
-
- def __getattr__(self, key):
- return 42
-
- m = Meh()
-
- self.assertEqual(m.anything, 42)
- with self.assertRaises(NotImplementedError):
- m["nothing"]
-
- self.s.names = {"m": m}
- self.t("m.anything", 42)
-
- with self.assertRaises(NotImplementedError):
- self.t("m['nothing']", None)
-
- self.s.ATTR_INDEX_FALLBACK = False
-
- self.t("m.anything", 42)
-
- with self.assertRaises(NotImplementedError):
- self.t("m['nothing']", None)
-
-
-class TestShortCircuiting(DRYTest):
- def test_shortcircuit_if(self):
- x = []
-
- def foo(y):
- x.append(y)
- return y
-
- self.s.functions = {"foo": foo}
- self.t("foo(1) if foo(2) else foo(3)", 1)
- self.assertListEqual(x, [2, 1])
-
- x = []
- self.t("42 if True else foo(99)", 42)
- self.assertListEqual(x, [])
-
- def test_shortcircuit_comparison(self):
- x = []
-
- def foo(y):
- x.append(y)
- return y
-
- self.s.functions = {"foo": foo}
- self.t("foo(11) < 12", True)
- self.assertListEqual(x, [11])
- x = []
-
- self.t("1 > 2 < foo(22)", False)
- self.assertListEqual(x, [])
-
-
-class TestDisallowedFunctions(DRYTest):
- def test_functions_are_disallowed_at_init(self):
- DISALLOWED = [
- type,
- isinstance,
- eval,
- getattr,
- setattr,
- help,
- repr,
- compile,
- open,
- ]
- if simpleeval.PYTHON3:
- exec("DISALLOWED.append(exec)") # exec is not a function in Python2...
-
- for f in simpleeval.DISALLOW_FUNCTIONS:
- assert f in DISALLOWED
-
- for x in DISALLOWED:
- with self.assertRaises(FeatureNotAvailable):
- s = SimpleEval(functions={"foo": x})
-
- def test_functions_are_disallowed_in_expressions(self):
- DISALLOWED = [
- type,
- isinstance,
- eval,
- getattr,
- setattr,
- help,
- repr,
- compile,
- open,
- ]
-
- if simpleeval.PYTHON3:
- exec("DISALLOWED.append(exec)") # exec is not a function in Python2...
-
- for f in simpleeval.DISALLOW_FUNCTIONS:
- assert f in DISALLOWED
-
- DF = simpleeval.DEFAULT_FUNCTIONS.copy()
-
- for x in DISALLOWED:
- simpleeval.DEFAULT_FUNCTIONS = DF.copy()
- with self.assertRaises(FeatureNotAvailable):
- s = SimpleEval()
- s.functions["foo"] = x
- s.eval("foo(42)")
-
- simpleeval.DEFAULT_FUNCTIONS = DF.copy()
-
-
-if __name__ == "__main__": # pragma: no cover
- unittest.main()
diff --git a/tests/core/test_streams.py b/tests/core/test_streams.py
index 34bbc75141..8a415e55d9 100644
--- a/tests/core/test_streams.py
+++ b/tests/core/test_streams.py
@@ -10,71 +10,23 @@
import requests
from singer_sdk._singerlib import Catalog, MetadataMapping
+from singer_sdk.exceptions import (
+ InvalidReplicationKeyException,
+)
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers.jsonpath import _compile_jsonpath, extract_jsonpath
from singer_sdk.pagination import first
-from singer_sdk.streams.core import (
- REPLICATION_FULL_TABLE,
- REPLICATION_INCREMENTAL,
- Stream,
-)
+from singer_sdk.streams.core import REPLICATION_FULL_TABLE, REPLICATION_INCREMENTAL
from singer_sdk.streams.graphql import GraphQLStream
from singer_sdk.streams.rest import RESTStream
-from singer_sdk.tap_base import Tap
-from singer_sdk.typing import (
- DateTimeType,
- IntegerType,
- PropertiesList,
- Property,
- StringType,
-)
+from singer_sdk.typing import IntegerType, PropertiesList, Property, StringType
+from tests.core.conftest import SimpleTestStream
CONFIG_START_DATE = "2021-01-01"
-
-class SimpleTestStream(Stream):
- """Test stream class."""
-
- name = "test"
- schema = PropertiesList(
- Property("id", IntegerType, required=True),
- Property("value", StringType, required=True),
- Property("updatedAt", DateTimeType, required=True),
- ).to_dict()
- replication_key = "updatedAt"
-
- def __init__(self, tap: Tap):
- """Create a new stream."""
- super().__init__(tap, schema=self.schema, name=self.name)
-
- def get_records(
- self,
- context: dict | None, # noqa: ARG002
- ) -> t.Iterable[dict[str, t.Any]]:
- """Generate records."""
- yield {"id": 1, "value": "Egypt"}
- yield {"id": 2, "value": "Germany"}
- yield {"id": 3, "value": "India"}
-
-
-class UnixTimestampIncrementalStream(SimpleTestStream):
- name = "unix_ts"
- schema = PropertiesList(
- Property("id", IntegerType, required=True),
- Property("value", StringType, required=True),
- Property("updatedAt", IntegerType, required=True),
- ).to_dict()
- replication_key = "updatedAt"
-
-
-class UnixTimestampIncrementalStream2(UnixTimestampIncrementalStream):
- name = "unix_ts_override"
-
- def compare_start_date(self, value: str, start_date_value: str) -> str:
- """Compare a value to a start date value."""
-
- start_timestamp = pendulum.parse(start_date_value).format("X")
- return max(value, start_timestamp, key=float)
+if t.TYPE_CHECKING:
+ from singer_sdk import Stream, Tap
+ from tests.core.conftest import SimpleTestTap
class RestTestStream(RESTStream):
@@ -121,43 +73,13 @@ class GraphqlTestStream(GraphQLStream):
replication_key = "updatedAt"
-class SimpleTestTap(Tap):
- """Test tap class."""
-
- name = "test-tap"
- settings_jsonschema = PropertiesList(Property("start_date", DateTimeType)).to_dict()
-
- def discover_streams(self) -> list[Stream]:
- """List all streams."""
- return [
- SimpleTestStream(self),
- UnixTimestampIncrementalStream(self),
- UnixTimestampIncrementalStream2(self),
- ]
-
-
-@pytest.fixture
-def tap() -> SimpleTestTap:
- """Tap instance."""
- return SimpleTestTap(
- config={"start_date": CONFIG_START_DATE},
- parse_env_config=False,
- )
-
-
-@pytest.fixture
-def stream(tap: SimpleTestTap) -> SimpleTestStream:
- """Create a new stream instance."""
- return t.cast(SimpleTestStream, tap.load_streams()[0])
-
-
@pytest.fixture
-def unix_timestamp_stream(tap: SimpleTestTap) -> UnixTimestampIncrementalStream:
+def stream(tap):
"""Create a new stream instance."""
- return t.cast(UnixTimestampIncrementalStream, tap.load_streams()[1])
+ return tap.load_streams()[0]
-def test_stream_apply_catalog(stream: SimpleTestStream):
+def test_stream_apply_catalog(stream: Stream):
"""Applying a catalog to a stream should overwrite fields."""
assert stream.primary_keys == []
assert stream.replication_key == "updatedAt"
@@ -248,7 +170,7 @@ def test_stream_apply_catalog(stream: SimpleTestStream):
],
)
def test_stream_starting_timestamp(
- tap: SimpleTestTap,
+ tap: Tap,
stream_name: str,
bookmark_value: str,
expected_starting_value: t.Any,
@@ -275,6 +197,24 @@ def test_stream_starting_timestamp(
assert get_starting_value(None) == expected_starting_value
+def test_stream_invalid_replication_key(tap: SimpleTestTap):
+ """Validate an exception is raised if replication_key not in schema."""
+
+ class InvalidReplicationKeyStream(SimpleTestStream):
+ replication_key = "INVALID"
+
+ stream = InvalidReplicationKeyStream(tap)
+
+ with pytest.raises(
+ InvalidReplicationKeyException,
+ match=(
+ f"Field '{stream.replication_key}' is not in schema for stream "
+ f"'{stream.name}'"
+ ),
+ ):
+ _check = stream.is_timestamp_replication_key
+
+
@pytest.mark.parametrize(
"path,content,result",
[
@@ -332,12 +272,7 @@ def test_stream_starting_timestamp(
"nested_values",
],
)
-def test_jsonpath_rest_stream(
- tap: SimpleTestTap,
- path: str,
- content: str,
- result: list[dict],
-):
+def test_jsonpath_rest_stream(tap: Tap, path: str, content: str, result: list[dict]):
"""Validate records are extracted correctly from the API response."""
fake_response = requests.Response()
fake_response._content = str.encode(content)
@@ -350,7 +285,7 @@ def test_jsonpath_rest_stream(
assert list(records) == result
-def test_jsonpath_graphql_stream_default(tap: SimpleTestTap):
+def test_jsonpath_graphql_stream_default(tap: Tap):
"""Validate graphql JSONPath, defaults to the stream name."""
content = """{
"data": {
@@ -370,7 +305,7 @@ def test_jsonpath_graphql_stream_default(tap: SimpleTestTap):
assert list(records) == [{"id": 1, "value": "abc"}, {"id": 2, "value": "def"}]
-def test_jsonpath_graphql_stream_override(tap: SimpleTestTap):
+def test_jsonpath_graphql_stream_override(tap: Tap):
"""Validate graphql jsonpath can be updated."""
content = """[
{"id": 1, "value": "abc"},
@@ -457,7 +392,7 @@ def records_jsonpath(cls): # noqa: N805
],
)
def test_next_page_token_jsonpath(
- tap: SimpleTestTap,
+ tap: Tap,
path: str,
content: str,
headers: dict,
@@ -489,7 +424,7 @@ def test_cached_jsonpath():
assert recompiled is compiled
-def test_sync_costs_calculation(tap: SimpleTestTap, caplog):
+def test_sync_costs_calculation(tap: Tap, caplog):
"""Test sync costs are added up correctly."""
fake_request = requests.PreparedRequest()
fake_response = requests.Response()
@@ -574,7 +509,7 @@ def calculate_test_cost(
),
],
)
-def test_stream_class_selection(input_catalog, selection):
+def test_stream_class_selection(tap_class, input_catalog, selection):
"""Test stream class selection."""
class SelectedStream(RESTStream):
@@ -586,11 +521,12 @@ class UnselectedStream(SelectedStream):
name = "unselected_stream"
selected_by_default = False
- class MyTap(SimpleTestTap):
+ class MyTap(tap_class):
def discover_streams(self):
return [SelectedStream(self), UnselectedStream(self)]
# Check that the selected stream is selected
- tap = MyTap(config=None, catalog=input_catalog)
- for stream in selection:
- assert tap.streams[stream].selected is selection[stream]
+ tap = MyTap(config=None, catalog=input_catalog, validate_config=False)
+ assert all(
+ tap.streams[stream].selected is selection[stream] for stream in selection
+ )
diff --git a/tests/core/test_tap_class.py b/tests/core/test_tap_class.py
new file mode 100644
index 0000000000..93015fbb11
--- /dev/null
+++ b/tests/core/test_tap_class.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import json
+import typing as t
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from singer_sdk.exceptions import ConfigValidationError
+
+if t.TYPE_CHECKING:
+ from singer_sdk import Tap
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'username' is a required property", "'password' is a required property"],
+ id="missing_username_and_password",
+ ),
+ pytest.param(
+ {"username": "utest"},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'password' is a required property"],
+ id="missing_password",
+ ),
+ pytest.param(
+ {"username": "utest", "password": "ptest", "extra": "not valid"},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["Additional properties are not allowed ('extra' was unexpected)"],
+ id="extra_property",
+ ),
+ pytest.param(
+ {"username": "utest", "password": "ptest"},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(
+ tap_class: type[Tap],
+ config_dict: dict,
+ expectation,
+ errors: list[str],
+):
+ with expectation as exc:
+ tap_class(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli(tap_class: type[Tap]):
+ """Test the CLI."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(tap_class.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tap_class: type[Tap], tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(tap_class.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'username' is a required property" in result.stderr
+ assert "'password' is a required property" in result.stderr
+
+
+def test_cli_discover(tap_class: type[Tap], tmp_path):
+ """Test the CLI discover command."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(
+ tap_class.cli,
+ [
+ "--config",
+ str(config_path),
+ "--discover",
+ ],
+ )
+ assert result.exit_code == 0
+ assert "streams" in json.loads(result.stdout)
diff --git a/tests/core/test_target_base.py b/tests/core/test_target_base.py
index 1fd6b9a93b..de344c7e3f 100644
--- a/tests/core/test_target_base.py
+++ b/tests/core/test_target_base.py
@@ -4,8 +4,12 @@
import pytest
-from singer_sdk.exceptions import MissingKeyPropertiesError
-from tests.conftest import BatchSinkMock, TargetMock
+from singer_sdk.exceptions import (
+ MissingKeyPropertiesError,
+ RecordsWithoutSchemaException,
+)
+from singer_sdk.helpers.capabilities import PluginCapabilities
+from tests.conftest import BatchSinkMock, SQLSinkMock, SQLTargetMock, TargetMock
def test_get_sink():
@@ -53,3 +57,87 @@ def test_validate_record():
# Test invalid record
with pytest.raises(MissingKeyPropertiesError):
sink._singer_validate_message({"name": "test"})
+
+
+def test_target_about_info():
+ target = TargetMock()
+ about = target._get_about_info()
+
+ assert about.capabilities == [
+ PluginCapabilities.ABOUT,
+ PluginCapabilities.STREAM_MAPS,
+ PluginCapabilities.FLATTENING,
+ PluginCapabilities.BATCH,
+ ]
+
+ assert "stream_maps" in about.settings["properties"]
+ assert "stream_map_config" in about.settings["properties"]
+ assert "flattening_enabled" in about.settings["properties"]
+ assert "flattening_max_depth" in about.settings["properties"]
+ assert "batch_config" in about.settings["properties"]
+ assert "add_record_metadata" in about.settings["properties"]
+
+
+def test_sql_get_sink():
+ input_schema_1 = {
+ "properties": {
+ "id": {
+ "type": ["string", "null"],
+ },
+ "col_ts": {
+ "format": "date-time",
+ "type": ["string", "null"],
+ },
+ },
+ }
+ input_schema_2 = copy.deepcopy(input_schema_1)
+ key_properties = []
+ target = SQLTargetMock(config={"sqlalchemy_url": "sqlite:///"})
+ sink = SQLSinkMock(
+ target=target,
+ stream_name="foo",
+ schema=input_schema_1,
+ key_properties=key_properties,
+ connector=target.target_connector,
+ )
+ target._sinks_active["foo"] = sink
+ sink_returned = target.get_sink(
+ "foo",
+ schema=input_schema_2,
+ key_properties=key_properties,
+ )
+ assert sink_returned is sink
+
+
+def test_add_sqlsink_and_get_sink():
+ input_schema_1 = {
+ "properties": {
+ "id": {
+ "type": ["string", "null"],
+ },
+ "col_ts": {
+ "format": "date-time",
+ "type": ["string", "null"],
+ },
+ },
+ }
+ input_schema_2 = copy.deepcopy(input_schema_1)
+ key_properties = []
+ target = SQLTargetMock(config={"sqlalchemy_url": "sqlite:///"})
+ sink = target.add_sqlsink(
+ "foo",
+ schema=input_schema_2,
+ key_properties=key_properties,
+ )
+
+ sink_returned = target.get_sink(
+ "foo",
+ )
+
+ assert sink_returned is sink
+
+ # Test invalid call
+ with pytest.raises(RecordsWithoutSchemaException):
+ target.get_sink(
+ "bar",
+ )
diff --git a/tests/core/test_target_class.py b/tests/core/test_target_class.py
new file mode 100644
index 0000000000..f84ae1dae4
--- /dev/null
+++ b/tests/core/test_target_class.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import json
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from samples.sample_target_sqlite import SQLiteTarget
+from singer_sdk.exceptions import ConfigValidationError
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'path_to_db' is a required property"],
+ id="missing_path_to_db",
+ ),
+ pytest.param(
+ {"path_to_db": "sqlite://test.db"},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(config_dict: dict, expectation, errors: list[str]):
+ with expectation as exc:
+ SQLiteTarget(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli():
+ """Test the CLI."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(SQLiteTarget.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(SQLiteTarget.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'path_to_db' is a required property" in result.stderr
diff --git a/tests/core/test_testing.py b/tests/core/test_testing.py
new file mode 100644
index 0000000000..5715cd1e1d
--- /dev/null
+++ b/tests/core/test_testing.py
@@ -0,0 +1,43 @@
+"""Test the plugin testing helpers."""
+
+from __future__ import annotations
+
+import pytest
+
+from singer_sdk.testing.factory import BaseTestClass
+
+
+def test_module_deprecations():
+ with pytest.deprecated_call():
+ from singer_sdk.testing import get_standard_tap_tests # noqa: F401
+
+ with pytest.deprecated_call():
+ from singer_sdk.testing import get_standard_target_tests # noqa: F401
+
+ from singer_sdk import testing
+
+ with pytest.raises(
+ AttributeError,
+ match="module singer_sdk.testing has no attribute",
+ ):
+ testing.foo # noqa: B018
+
+
+def test_test_class_mro():
+ class PluginTestClass(BaseTestClass):
+ pass
+
+ PluginTestClass.params["x"] = 1
+
+ class AnotherPluginTestClass(BaseTestClass):
+ pass
+
+ AnotherPluginTestClass.params["x"] = 2
+ AnotherPluginTestClass.params["y"] = 3
+
+ class SubPluginTestClass(PluginTestClass):
+ pass
+
+ assert PluginTestClass.params == {"x": 1}
+ assert AnotherPluginTestClass.params == {"x": 2, "y": 3}
+ assert SubPluginTestClass.params == {"x": 1}
diff --git a/tests/samples/conftest.py b/tests/samples/conftest.py
index 29560a330a..c1467d791b 100644
--- a/tests/samples/conftest.py
+++ b/tests/samples/conftest.py
@@ -34,12 +34,7 @@ def _sqlite_sample_db(sqlite_connector):
@pytest.fixture
-def sqlite_sample_tap(
- _sqlite_sample_db,
- sqlite_sample_db_config,
- sqlite_sample_db_state,
-) -> SQLiteTap:
- _ = _sqlite_sample_db
+def sqlite_sample_db_catalog(sqlite_sample_db_config) -> Catalog:
catalog_obj = Catalog.from_dict(
_get_tap_catalog(SQLiteTap, config=sqlite_sample_db_config, select_all=True),
)
@@ -55,9 +50,20 @@ def sqlite_sample_tap(
t2.key_properties = ["c1"]
t2.replication_key = "c1"
t2.replication_method = "INCREMENTAL"
+ return catalog_obj
+
+
+@pytest.fixture
+def sqlite_sample_tap(
+ _sqlite_sample_db,
+ sqlite_sample_db_config,
+ sqlite_sample_db_state,
+ sqlite_sample_db_catalog,
+) -> SQLiteTap:
+ _ = _sqlite_sample_db
return SQLiteTap(
config=sqlite_sample_db_config,
- catalog=catalog_obj.to_dict(),
+ catalog=sqlite_sample_db_catalog.to_dict(),
state=sqlite_sample_db_state,
)
diff --git a/tests/samples/test_tap_sqlite.py b/tests/samples/test_tap_sqlite.py
index bceaa34b1e..b5ed7b5493 100644
--- a/tests/samples/test_tap_sqlite.py
+++ b/tests/samples/test_tap_sqlite.py
@@ -1,8 +1,11 @@
from __future__ import annotations
+import datetime
import json
import typing as t
+import pytest
+import time_machine
from click.testing import CliRunner
from samples.sample_tap_sqlite import SQLiteTap
@@ -11,6 +14,7 @@
from singer_sdk._singerlib import MetadataMapping, StreamMetadata
from singer_sdk.testing import (
get_standard_tap_tests,
+ tap_sync_test,
tap_to_target_sync_test,
)
@@ -48,7 +52,7 @@ def test_tap_sqlite_cli(sqlite_sample_db_config: dict[str, t.Any], tmp_path: Pat
def test_sql_metadata(sqlite_sample_tap: SQLTap):
stream = t.cast(SQLStream, sqlite_sample_tap.streams["main-t1"])
detected_metadata = stream.catalog_entry["metadata"]
- detected_root_md = [md for md in detected_metadata if md["breadcrumb"] == []][0]
+ detected_root_md = next(md for md in detected_metadata if md["breadcrumb"] == [])
detected_root_md = detected_root_md["metadata"]
translated_metadata = StreamMetadata.from_dict(detected_root_md)
assert detected_root_md["schema-name"] == translated_metadata.schema_name
@@ -116,3 +120,27 @@ def test_sync_sqlite_to_csv(sqlite_sample_tap: SQLTap, tmp_path: Path):
sqlite_sample_tap,
SampleTargetCSV(config={"target_folder": f"{tmp_path}/"}),
)
+
+
+@pytest.fixture
+@time_machine.travel(
+ datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+)
+def sqlite_sample_tap_state_messages(sqlite_sample_tap: SQLTap) -> list[dict]:
+ stdout, _ = tap_sync_test(sqlite_sample_tap)
+ state_messages = []
+ for line in stdout.readlines():
+ message = json.loads(line)
+ if message["type"] == "STATE":
+ state_messages.append(message)
+
+ return state_messages
+
+
+def test_sqlite_state(sqlite_sample_tap_state_messages):
+ assert all(
+ "progress_markers" not in bookmark
+ for message in sqlite_sample_tap_state_messages
+ for bookmark in message["value"]["bookmarks"].values()
+ )
diff --git a/tests/samples/test_target_csv.py b/tests/samples/test_target_csv.py
index 715edbb65f..e55aa3cbcf 100644
--- a/tests/samples/test_target_csv.py
+++ b/tests/samples/test_target_csv.py
@@ -1,6 +1,7 @@
"""Test tap-to-target sync."""
from __future__ import annotations
+import datetime
import json
import shutil
import typing as t
@@ -8,8 +9,8 @@
from pathlib import Path
import pytest
+import time_machine
from click.testing import CliRunner
-from freezegun import freeze_time
from samples.sample_mapper.mapper import StreamTransform
from samples.sample_tap_countries.countries_tap import SampleTapCountries
@@ -77,12 +78,33 @@ def test_target_batching():
buf, _ = tap_sync_test(tap)
- mocked_starttime = "2012-01-01 12:00:00"
- mocked_jumptotime2 = "2012-01-01 12:31:00"
- mocked_jumptotime3 = "2012-01-01 13:02:00"
+ mocked_starttime = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 12,
+ 0,
+ tzinfo=datetime.timezone.utc,
+ )
+ mocked_jumptotime2 = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 12,
+ 31,
+ tzinfo=datetime.timezone.utc,
+ )
+ mocked_jumptotime3 = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 13,
+ 2,
+ tzinfo=datetime.timezone.utc,
+ )
countries_record_count = 257
- with freeze_time(mocked_starttime):
+ with time_machine.travel(mocked_starttime, tick=False):
target = TargetMock(config={})
target.max_parallelism = 1 # Limit unit test to 1 process
assert target.num_records_processed == 0
@@ -96,7 +118,7 @@ def test_target_batching():
assert len(target.records_written) == 0 # Drain not yet called
assert len(target.state_messages_written) == 0 # Drain not yet called
- with freeze_time(mocked_jumptotime2):
+ with time_machine.travel(mocked_jumptotime2, tick=False):
buf.seek(0)
target_sync_test(target, buf, finalize=False)
@@ -105,7 +127,7 @@ def test_target_batching():
assert len(target.records_written) == countries_record_count + 1
assert len(target.state_messages_written) == 1
- with freeze_time(mocked_jumptotime3):
+ with time_machine.travel(mocked_jumptotime3, tick=False):
buf.seek(0)
target_sync_test(target, buf, finalize=False)
diff --git a/tests/samples/test_target_sqlite.py b/tests/samples/test_target_sqlite.py
index a7ca3b3c5b..a66805a092 100644
--- a/tests/samples/test_target_sqlite.py
+++ b/tests/samples/test_target_sqlite.py
@@ -19,13 +19,13 @@
from samples.sample_target_sqlite import SQLiteSink, SQLiteTarget
from singer_sdk import typing as th
from singer_sdk.testing import (
- _get_tap_catalog,
tap_sync_test,
tap_to_target_sync_test,
target_sync_test,
)
if t.TYPE_CHECKING:
+ from singer_sdk._singerlib import Catalog
from singer_sdk.tap_base import SQLTap
from singer_sdk.target_base import SQLTarget
@@ -36,7 +36,7 @@ def path_to_target_db(tmp_path: Path) -> Path:
@pytest.fixture
-def sqlite_target_test_config(path_to_target_db: str) -> dict:
+def sqlite_target_test_config(path_to_target_db: Path) -> dict:
"""Get configuration dictionary for target-csv."""
return {"path_to_db": str(path_to_target_db)}
@@ -67,6 +67,7 @@ def sqlite_sample_target_batch(sqlite_target_test_config):
def test_sync_sqlite_to_sqlite(
sqlite_sample_tap: SQLTap,
sqlite_sample_target: SQLTarget,
+ sqlite_sample_db_catalog: Catalog,
):
"""End-to-end-to-end test for SQLite tap and target.
@@ -84,8 +85,10 @@ def test_sync_sqlite_to_sqlite(
)
orig_stdout.seek(0)
tapped_config = dict(sqlite_sample_target.config)
- catalog = _get_tap_catalog(SQLiteTap, config=tapped_config, select_all=True)
- tapped_target = SQLiteTap(config=tapped_config, catalog=catalog)
+ tapped_target = SQLiteTap(
+ config=tapped_config,
+ catalog=sqlite_sample_db_catalog.to_dict(),
+ )
new_stdout, _ = tap_sync_test(tapped_target)
orig_stdout.seek(0)
@@ -396,6 +399,35 @@ def test_sqlite_column_no_morph(sqlite_sample_target: SQLTarget):
target_sync_test(sqlite_sample_target, input=StringIO(tap_output_b), finalize=True)
+def test_record_with_missing_properties(
+ sqlite_sample_target: SQLTarget,
+):
+ """Test handling of records with missing properties."""
+ tap_output = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ {
+ "type": "SCHEMA",
+ "stream": "test_stream",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "name": {"type": "string"},
+ },
+ },
+ "key_properties": ["id"],
+ },
+ {
+ "type": "RECORD",
+ "stream": "test_stream",
+ "record": {"id": 1},
+ },
+ ]
+ )
+ target_sync_test(sqlite_sample_target, input=StringIO(tap_output), finalize=True)
+
+
@pytest.mark.parametrize(
"stream_name,schema,key_properties,expected_dml",
[
@@ -479,3 +511,48 @@ def test_hostile_to_sqlite(
"hname_starts_with_number",
"name_with_emoji_",
}
+
+
+def test_overwrite_load_method(
+ sqlite_target_test_config: dict,
+):
+ sqlite_target_test_config["load_method"] = "overwrite"
+ target = SQLiteTarget(config=sqlite_target_test_config)
+ test_tbl = f"zzz_tmp_{str(uuid4()).split('-')[-1]}"
+ schema_msg = {
+ "type": "SCHEMA",
+ "stream": test_tbl,
+ "schema": {
+ "type": "object",
+ "properties": {"col_a": th.StringType().to_dict()},
+ },
+ }
+
+ tap_output_a = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ schema_msg,
+ {"type": "RECORD", "stream": test_tbl, "record": {"col_a": "123"}},
+ ]
+ )
+ # Assert
+ db = sqlite3.connect(sqlite_target_test_config["path_to_db"])
+ cursor = db.cursor()
+
+ target_sync_test(target, input=StringIO(tap_output_a), finalize=True)
+ cursor.execute(f"SELECT col_a FROM {test_tbl} ;") # noqa: S608
+ records = [res[0] for res in cursor.fetchall()]
+ assert records == ["123"]
+
+ tap_output_b = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ schema_msg,
+ {"type": "RECORD", "stream": test_tbl, "record": {"col_a": "456"}},
+ ]
+ )
+ target = SQLiteTarget(config=sqlite_target_test_config)
+ target_sync_test(target, input=StringIO(tap_output_b), finalize=True)
+ cursor.execute(f"SELECT col_a FROM {test_tbl} ;") # noqa: S608
+ records = [res[0] for res in cursor.fetchall()]
+ assert records == ["456"]
diff --git a/tests/snapshots/mapped_stream/aliased_stream.jsonl b/tests/snapshots/mapped_stream/aliased_stream.jsonl
index 46d5daffe8..8df28ddf45 100644
--- a/tests/snapshots/mapped_stream/aliased_stream.jsonl
+++ b/tests/snapshots/mapped_stream/aliased_stream.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "aliased_stream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "aliased_stream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/drop_property.jsonl b/tests/snapshots/mapped_stream/drop_property.jsonl
index 8694f47368..aece20a309 100644
--- a/tests/snapshots/mapped_stream/drop_property.jsonl
+++ b/tests/snapshots/mapped_stream/drop_property.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/drop_property_null_string.jsonl b/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
index 8694f47368..aece20a309 100644
--- a/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
+++ b/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_all.jsonl b/tests/snapshots/mapped_stream/flatten_all.jsonl
index c54db15632..e588c9dc15 100644
--- a/tests/snapshots/mapped_stream/flatten_all.jsonl
+++ b/tests/snapshots/mapped_stream/flatten_all.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "user__some_numbers": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "user__some_numbers": "[3.14, 2.718]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "user__some_numbers": "[10.32, 1.618]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "user__some_numbers": "[1.414, 1.732]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_depth_0.jsonl b/tests/snapshots/mapped_stream/flatten_depth_0.jsonl
new file mode 100644
index 0000000000..7a8b54581b
--- /dev/null
+++ b/tests/snapshots/mapped_stream/flatten_depth_0.jsonl
@@ -0,0 +1,6 @@
+{"type": "STATE", "value": {}}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_depth_1.jsonl b/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
index 275e3295cc..e3f6cd05f6 100644
--- a/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
+++ b/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub": "{\"num\": 1}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub": "{\"num\": 2}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub": "{\"num\": 3}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub": {"type": ["string", "null"]}, "user__some_numbers": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub": "{\"num\": 1}", "user__some_numbers": "[3.14, 2.718]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub": "{\"num\": 2}", "user__some_numbers": "[10.32, 1.618]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub": "{\"num\": 3}", "user__some_numbers": "[1.414, 1.732]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/keep_all_fields.jsonl b/tests/snapshots/mapped_stream/keep_all_fields.jsonl
index 13ddce438f..da476f6a5b 100644
--- a/tests/snapshots/mapped_stream/keep_all_fields.jsonl
+++ b/tests/snapshots/mapped_stream/keep_all_fields.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/map_and_flatten.jsonl b/tests/snapshots/mapped_stream/map_and_flatten.jsonl
index bf26201848..921094d05f 100644
--- a/tests/snapshots/mapped_stream/map_and_flatten.jsonl
+++ b/tests/snapshots/mapped_stream/map_and_flatten.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": ["email_hash"]}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "user__some_numbers": {"type": ["string", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": ["email_hash"]}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "user__some_numbers": "[3.14, 2.718]", "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "user__some_numbers": "[10.32, 1.618]", "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "user__some_numbers": "[1.414, 1.732]", "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/no_map.jsonl b/tests/snapshots/mapped_stream/no_map.jsonl
index 019b1f9d93..7a8b54581b 100644
--- a/tests/snapshots/mapped_stream/no_map.jsonl
+++ b/tests/snapshots/mapped_stream/no_map.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/record_to_column.jsonl b/tests/snapshots/mapped_stream/record_to_column.jsonl
new file mode 100644
index 0000000000..8fc3efb21b
--- /dev/null
+++ b/tests/snapshots/mapped_stream/record_to_column.jsonl
@@ -0,0 +1,6 @@
+{"type": "STATE", "value": {}}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"type": "object", "properties": {"_data": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_1.jsonl b/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
index e63d03815b..a68add19cd 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl b/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
index e63d03815b..a68add19cd 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_2.jsonl b/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
index 41cce23d75..2345510066 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_2", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_2", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}