From e166bab97374ada5cd7ee236dbccd30995d717f3 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Tue, 24 Dec 2024 10:34:01 +0200
Subject: [PATCH 001/111] Bring code

---
 README.md                                     |  121 +-
 VERSION                                       |    1 +
 docs/conf.py                                  |   52 +
 docs/index.md                                 |   23 +
 noxfile.py                                    |  196 ++
 poetry.lock                                   | 2598 +++++++++++++++++
 pyproject.toml                                |  138 +
 src/sdfs/__init__.py                          |   33 +
 src/sdfs/clients/__init__.py                  |   17 +
 src/sdfs/clients/seismic_dms_client.py        |  474 +++
 src/sdfs/core.py                              |  952 ++++++
 src/sdfs/exceptions.py                        |   48 +
 src/sdfs/providers/__init__.py                |   34 +
 src/sdfs/providers/abstract_provider.py       |   85 +
 src/sdfs/providers/anthos.py                  |   60 +
 src/sdfs/providers/aws.py                     |   60 +
 src/sdfs/providers/azure.py                   |   60 +
 src/sdfs/providers/factory.py                 |   95 +
 src/sdfs/providers/google.py                  |   69 +
 src/sdfs/providers/ibm.py                     |   60 +
 src/sdfs/utils/__init__.py                    |   18 +
 src/sdfs/utils/http_utils.py                  |   31 +
 src/sdfs/utils/validators.py                  |   32 +
 tests/.env.example                            |   11 +
 tests/__init__.py                             |   18 +
 tests/integration/__init__.py                 |   18 +
 tests/integration/conftest.py                 |  137 +
 .../test_sd_file_system_integration.py        |  554 ++++
 .../test_seismic_dms_client_integration.py    |  128 +
 .../test_single_file_sd_integration.py        |  552 ++++
 tests/local_test.txt                          |    1 +
 tests/test_retry_flow.py                      |  102 +
 tests/unit/__init__.py                        |   18 +
 tests/unit/providers/__init__.py              |   18 +
 tests/unit/providers/conftest.py              |   41 +
 tests/unit/providers/test_factory.py          |  105 +
 tests/unit/providers/test_google_provider.py  |   83 +
 tests/unit/sd_file_system_mocks.py            |  504 ++++
 tests/unit/seismic_dms_client_mocks.py        |  123 +
 tests/unit/singleton.py                       |   38 +
 tests/unit/test_sd_file_system.py             |  337 +++
 tests/unit/test_seismic_dms_client.py         |  228 ++
 version.py                                    |   57 +
 43 files changed, 8267 insertions(+), 63 deletions(-)
 create mode 100644 VERSION
 create mode 100644 docs/conf.py
 create mode 100644 docs/index.md
 create mode 100644 noxfile.py
 create mode 100644 poetry.lock
 create mode 100644 pyproject.toml
 create mode 100644 src/sdfs/__init__.py
 create mode 100644 src/sdfs/clients/__init__.py
 create mode 100644 src/sdfs/clients/seismic_dms_client.py
 create mode 100644 src/sdfs/core.py
 create mode 100644 src/sdfs/exceptions.py
 create mode 100644 src/sdfs/providers/__init__.py
 create mode 100644 src/sdfs/providers/abstract_provider.py
 create mode 100644 src/sdfs/providers/anthos.py
 create mode 100644 src/sdfs/providers/aws.py
 create mode 100644 src/sdfs/providers/azure.py
 create mode 100644 src/sdfs/providers/factory.py
 create mode 100644 src/sdfs/providers/google.py
 create mode 100644 src/sdfs/providers/ibm.py
 create mode 100644 src/sdfs/utils/__init__.py
 create mode 100644 src/sdfs/utils/http_utils.py
 create mode 100644 src/sdfs/utils/validators.py
 create mode 100644 tests/.env.example
 create mode 100644 tests/__init__.py
 create mode 100644 tests/integration/__init__.py
 create mode 100644 tests/integration/conftest.py
 create mode 100644 tests/integration/test_sd_file_system_integration.py
 create mode 100644 tests/integration/test_seismic_dms_client_integration.py
 create mode 100644 tests/integration/test_single_file_sd_integration.py
 create mode 100644 tests/local_test.txt
 create mode 100644 tests/test_retry_flow.py
 create mode 100644 tests/unit/__init__.py
 create mode 100644 tests/unit/providers/__init__.py
 create mode 100644 tests/unit/providers/conftest.py
 create mode 100644 tests/unit/providers/test_factory.py
 create mode 100644 tests/unit/providers/test_google_provider.py
 create mode 100644 tests/unit/sd_file_system_mocks.py
 create mode 100644 tests/unit/seismic_dms_client_mocks.py
 create mode 100644 tests/unit/singleton.py
 create mode 100644 tests/unit/test_sd_file_system.py
 create mode 100644 tests/unit/test_seismic_dms_client.py
 create mode 100644 version.py

diff --git a/README.md b/README.md
index 26c6589..667f17b 100644
--- a/README.md
+++ b/README.md
@@ -4,92 +4,87 @@ SDFS - low-level pythonic fsspec implementation for SDMS
 
 See ADR: [Contribution of MDIO Components to OSDU Seismic DMS](https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/seismic/home/-/issues/21)
 
-## Getting started
+Pythonic file-system for OSDU Seismic Store
 
-To make it easy for you to get started with GitLab, here's a list of recommended next steps.
+# Features
 
-Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
+- Abstract filesystem implementation for OSDU Data Platform Seismic DMS Service.
+- Cloud-native OSDU SeismicDmsClient for managing datasets.
 
-## Add your files
-
-- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files
-- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command:
-
-```
-cd existing_repo
-git remote add origin https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/seismic/seismic-dms-suite/seismic-dms-sdfs.git
-git branch -M main
-git push -uf origin main
+# Install dependencies
 ```
+# Clone the repository
+# Create virtual environment with a python version <3.11 and >=3.9
+python -m venv .venv
 
-## Integrate with your tools
-
-- [ ] [Set up project integrations](https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/seismic/seismic-dms-suite/seismic-dms-sdfs/-/settings/integrations)
-
-## Collaborate with your team
+# Activate virtual environment
+source .venv/Scripts/activate   # On Windows
+source .venv/bin/activate       # On Linux
 
-- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/)
-- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html)
-- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically)
-- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
-- [ ] [Set auto-merge](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html)
-
-## Test and Deploy
-
-Use the built-in continuous integration in GitLab.
+# Install dependencies
+poetry install
+```
 
-- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html)
-- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/)
-- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html)
-- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/)
-- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html)
+# Testing
 
-***
+## How to test the project
 
-# Editing this README
+Run the full test suite:
 
-When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thanks to [makeareadme.com](https://www.makeareadme.com/) for this template.
+```console
+$ nox
+```
 
-## Suggestions for a good README
+List the available Nox sessions:
 
-Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
+```console
+$ nox --list-sessions
+```
 
-## Name
-Choose a self-explaining name for your project.
+* You can also run a specific Nox session.
+For example, invoke the unit test suite like this:
 
-## Description
-Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
+```console
+$ nox --session=unit-tests
+```
 
-## Badges
-On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge.
+Unit tests are located in the _tests_/_unit_ directory.
 
-## Visuals
-Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
+* For integration testing you need to prepare the following environment variables:
 
-## Installation
-Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
+```
+<!-- Seismic related vars -->
+export SD_PATH=... (default value: sd://osdu/osdu-mdio/autotest_path/integration)
+export SEISMIC_STORE_URL=... (default value: https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3)
+
+<!-- For access token use case -->
+export ACCESS_TOKEN=...
+
+<!-- For refresh token use case -->
+export AUTH_REFRESH_TOKEN_URL=... (have default google auth URL)
+export REFRESH_TOKEN=...
+export CLIENT_ID=...
+export CLIENT_SECRET=...
+```
 
-## Usage
-Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
+Then run the nox command:
 
-## Support
-Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc.
+```
+$ nox --session=integration-tests
+```
 
-## Roadmap
-If you have ideas for releases in the future, it is a good idea to list them in the README.
+Integration tests are located in the _tests_/_integration_ directory.
 
-## Contributing
-State if you are open to contributions and what your requirements are for accepting them.
+All our tests are written using the [pytest] testing framework.
 
-For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self.
+[pytest]: https://pytest.readthedocs.io/
 
-You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser.
+** There is also a test that exercises the retry flow (this test is extremely long, which is why it was excluded from CI/CD).
 
-## Authors and acknowledgment
-Show your appreciation to those who have contributed to the project.
+To trigger the retry test manually:
 
-## License
-For open source projects, say how it is licensed.
+```
+$ python tests/test_retry_flow.py
+```
 
-## Project status
-If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers.
+Then wait for the log output with the results.
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..7bcd0e3
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.0.2
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..cf2549a
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,52 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Sphinx configuration."""
+project = "SDFS"
+author = "EPAM"
+copyright = "2023, EPAM"  # noqa: A001
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.autosectionlabel",
+    "sphinx_click",
+    "sphinx_copybutton",
+    "myst_nb",
+]
+
+autodoc_typehints = "description"
+autodoc_typehints_format = "short"
+autodoc_member_order = "groupwise"
+autoclass_content = "both"
+autosectionlabel_prefix_document = True
+
+html_theme = "furo"
+
+myst_number_code_blocks = ["python"]
+myst_heading_anchors = 2
+myst_enable_extensions = [
+    "linkify",
+    "replacements",
+    "smartquotes",
+]
+
+# sphinx-copybutton configurations
+copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
+copybutton_line_continuation_character = "\\"
+copybutton_prompt_is_regexp = True
+
+nb_execution_mode = "off"
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..3a7702d
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,23 @@
+```{include} ../README.md
+---
+end-before: <!-- github-only -->
+---
+```
+
+[contributor guide]: contributing
+[command-line usage]: usage
+[api reference]: reference
+[installation instructions]: installation
+
+```{toctree}
+---
+hidden:
+maxdepth: 1
+---
+installation
+usage
+reference
+contributing
+Code of Conduct <codeofconduct>
+License <license>
+```
diff --git a/noxfile.py b/noxfile.py
new file mode 100644
index 0000000..1d25fcc
--- /dev/null
+++ b/noxfile.py
@@ -0,0 +1,196 @@
+"""Nox sessions."""
+import os
+import shutil
+import sys
+from pathlib import Path
+from textwrap import dedent
+
+import nox
+
+try:
+    from nox_poetry import Session, session
+except ImportError:
+    message = f"""\
+    Nox failed to import the 'nox-poetry' package.
+
+    Please install it using the following command:
+
+    {sys.executable} -m pip install nox-poetry"""
+    raise SystemExit(dedent(message)) from None
+
+
+package = "sdfs"
+python_versions = ["3.11", "3.10", "3.9"]
+nox.needs_version = ">= 2022.1.7"
+nox.options.sessions = (
+    "safety",
+    "mypy",
+    "unit-tests",
+    "integration-tests",
+    "typeguard",
+    "xdoctest",
+    "docs-build",
+)
+
+
+@session(python=python_versions[0])
+def safety(session: Session) -> None:
+    """Scan dependencies for insecure packages.
+
+    Args:
+        session (Session): The Session object.
+    """
+    requirements = session.poetry.export_requirements()
+    session.install("safety")
+    session.run("safety", "check", "--full-report", f"--file={requirements}")
+
+
+@session(python=python_versions)
+def mypy(session: Session) -> None:
+    """Type-check using mypy.
+
+    Args:
+        session (Session): The Session object.
+    """
+    args = session.posargs or ["src", "tests", "docs/conf.py"]
+    session.install(".")
+    session.install("mypy", "pytest")
+    session.run("mypy", *args)
+    if not session.posargs:
+        session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py")
+
+
+@session(name="unit-tests", python=python_versions)
+def unit_tests(session: Session) -> None:
+    """Run the test suite.
+
+    Args:
+        session (Session): The Session object.
+    """
+    session.install(".")
+    session.install("coverage[toml]", "pytest", "pygments", "mock")
+    try:
+        session.run("coverage", "run", "--parallel", "-m", "pytest", "tests/unit")
+    finally:
+        if session.interactive:
+            session.notify("coverage", posargs=[])
+
+
+@session(name="integration-tests", python=python_versions)
+def integration_tests(session: Session) -> None:
+    """Run the test suite.
+
+    Args:
+        session (Session): The Session object.
+    """
+    session.install(".")
+    session.install("coverage[toml]", "pytest", "pygments", "mock")
+    try:
+        session.run(
+            "coverage", "run", "--parallel", "-m", "pytest", "tests/integration"
+        )
+    finally:
+        if session.interactive:
+            session.notify("coverage", posargs=[])
+
+
+@session(python=python_versions[0])
+def coverage(session: Session) -> None:
+    """Produce the coverage report.
+
+    Args:
+        session (Session): The Session object.
+    """
+    args = session.posargs or ["report"]
+
+    session.install("coverage[toml]")
+
+    if not session.posargs and any(Path().glob(".coverage.*")):
+        session.run("coverage", "combine")
+
+    session.run("coverage", *args)
+
+
+@session(python=python_versions[0])
+def typeguard(session: Session) -> None:
+    """Runtime type checking using Typeguard.
+
+    Args:
+        session (Session): The Session object.
+    """
+    session.install(".")
+    session.install("pytest", "typeguard", "pygments")
+    session.run("pytest", f"--typeguard-packages={package}", *session.posargs)
+
+
+@session(python=python_versions)
+def xdoctest(session: Session) -> None:
+    """Run examples with xdoctest.
+
+    Args:
+        session (Session): The Session object.
+    """
+    if session.posargs:
+        args = [package, *session.posargs]
+    else:
+        args = [f"--modname={package}", "--command=all"]
+        if "FORCE_COLOR" in os.environ:
+            args.append("--colored=1")
+
+    session.install(".")
+    session.install("xdoctest[colors]")
+    session.run("python", "-m", "xdoctest", *args)
+
+
+@session(name="docs-build", python=python_versions[0])
+def docs_build(session: Session) -> None:
+    """Build the documentation.
+
+    Args:
+        session (Session): The Session object.
+    """
+    args = session.posargs or ["docs", "docs/_build"]
+    if not session.posargs and "FORCE_COLOR" in os.environ:
+        args.insert(0, "--color")
+
+    session.install(".")
+    session.install(
+        "sphinx",
+        "sphinx-click",
+        "sphinx-copybutton",
+        "furo",
+        "myst-nb@git+https://github.com/executablebooks/MyST-NB@35ebd54",
+        "linkify-it-py",
+    )
+
+    build_dir = Path("docs", "_build")
+    if build_dir.exists():
+        shutil.rmtree(build_dir)
+
+    session.run("sphinx-build", *args)
+
+
+@session(python=python_versions[0])
+def docs(session: Session) -> None:
+    """Build and serve the documentation with live reloading on file changes.
+
+    Args:
+        session (Session): The Session object.
+    """
+    args = session.posargs or ["--open-browser", "docs", "docs/_build"]
+    session.install(".")
+    session.install(
+        "sphinx",
+        "sphinx-autobuild",
+        "sphinx-click",
+        "sphinx-copybutton",
+        "furo",
+        "myst-nb@git+https://github.com/executablebooks/MyST-NB@35ebd54",
+        "linkify-it-py",
+    )
+
+    build_dir = Path("docs", "_build")
+    if build_dir.exists():
+        shutil.rmtree(build_dir)
+
+    session.run("sphinx-autobuild", *args)
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..32afde7
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,2598 @@
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.4.4"
+description = "Happy Eyeballs for asyncio"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"},
+    {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.11.10"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"},
+    {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"},
+    {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"},
+    {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"},
+    {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"},
+    {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"},
+    {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"},
+    {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"},
+    {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"},
+    {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"},
+    {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"},
+    {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"},
+    {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"},
+    {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"},
+    {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"},
+    {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"},
+    {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"},
+    {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"},
+    {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"},
+    {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"},
+    {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"},
+    {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"},
+    {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"},
+    {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"},
+    {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"},
+    {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"},
+    {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"},
+    {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"},
+    {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = "sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"},
+    {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"},
+    {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"},
+    {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"},
+    {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"},
+    {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"},
+    {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"},
+    {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"},
+    {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"},
+    {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"},
+    {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"},
+    {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"},
+    {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"},
+    {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"},
+    {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"},
+    {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"},
+    {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"},
+    {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"},
+    {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"},
+    {file = "aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"},
+    {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"},
+    {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"},
+    {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"},
+    {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"},
+    {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"},
+    {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"},
+    {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"},
+    {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"},
+    {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"},
+    {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"},
+    {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"},
+    {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"},
+    {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"},
+    {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"},
+    {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"},
+    {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"},
+    {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"},
+    {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"},
+    {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"},
+    {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"},
+    {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"},
+    {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"},
+    {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"},
+    {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"},
+    {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"},
+    {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"},
+    {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"},
+    {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"},
+]
+
+[package.dependencies]
+aiohappyeyeballs = ">=2.3.0"
+aiosignal = ">=1.1.2"
+async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""}
+attrs = ">=17.3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+propcache = ">=0.2.0"
+yarl = ">=1.17.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+    {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "alabaster"
+version = "0.7.16"
+description = "A light, configurable Sphinx theme"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
+    {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
+]
+
+[[package]]
+name = "anyio"
+version = "4.7.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"},
+    {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
+
+[package.extras]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
+trio = ["trio (>=0.26.1)"]
+
+[[package]]
+name = "argcomplete"
+version = "3.5.1"
+description = "Bash tab completion for argparse"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "argcomplete-3.5.1-py3-none-any.whl", hash = "sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363"},
+    {file = "argcomplete-3.5.1.tar.gz", hash = "sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4"},
+]
+
+[package.extras]
+test = ["coverage", "mypy", "pexpect", "ruff", "wheel"]
+
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
+    {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
+]
+
+[[package]]
+name = "attrs"
+version = "24.2.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+    {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
+]
+
+[package.extras]
+benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+description = "Internationalization utilities"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
+    {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
+]
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+    {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
+    {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+cchardet = ["cchardet"]
+chardet = ["chardet"]
+charset-normalizer = ["charset-normalizer"]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
+[[package]]
+name = "cachetools"
+version = "5.5.0"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
+    {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
+]
+
+[[package]]
+name = "certifi"
+version = "2024.8.30"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+    {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.0"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
+    {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
+    {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "colorlog"
+version = "6.9.0"
+description = "Add colours to the output of Python's logging module."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"},
+    {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
+
+[[package]]
+name = "coverage"
+version = "7.6.9"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
+    {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
+    {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
+    {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
+    {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
+    {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
+    {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
+    {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
+    {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
+    {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
+    {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
+    {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
+    {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
+    {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
+    {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
+    {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
+    {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
+    {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
+    {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
+    {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
+    {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
+    {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
+    {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
+    {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
+    {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
+    {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+    {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
+
+[[package]]
+name = "distlib"
+version = "0.3.9"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+files = [
+    {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
+    {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
+]
+
+[[package]]
+name = "docutils"
+version = "0.20.1"
+description = "Docutils -- Python Documentation Utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"},
+    {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"},
+]
+
+[[package]]
+name = "dparse"
+version = "0.6.4"
+description = "A parser for Python dependency files"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "dparse-0.6.4-py3-none-any.whl", hash = "sha256:fbab4d50d54d0e739fbb4dedfc3d92771003a5b9aa8545ca7a7045e3b174af57"},
+    {file = "dparse-0.6.4.tar.gz", hash = "sha256:90b29c39e3edc36c6284c82c4132648eaf28a01863eb3c231c2512196132201a"},
+]
+
+[package.dependencies]
+packaging = "*"
+tomli = {version = "*", markers = "python_version < \"3.11\""}
+
+[package.extras]
+all = ["pipenv", "poetry", "pyyaml"]
+conda = ["pyyaml"]
+pipenv = ["pipenv"]
+poetry = ["poetry"]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.2"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "filelock"
+version = "3.16.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
+    {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
+]
+
+[package.extras]
+docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
+typing = ["typing-extensions (>=4.12.2)"]
+
+[[package]]
+name = "frozenlist"
+version = "1.5.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"},
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"},
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"},
+    {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"},
+    {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"},
+    {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"},
+    {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"},
+    {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"},
+    {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"},
+    {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"},
+    {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"},
+    {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"},
+    {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"},
+    {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"},
+    {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"},
+    {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"},
+    {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"},
+]
+
+[[package]]
+name = "fsspec"
+version = "2023.12.2"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"},
+    {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+devel = ["pytest", "pytest-cov"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+tqdm = ["tqdm"]
+
+[[package]]
+name = "furo"
+version = "2024.8.6"
+description = "A clean customisable Sphinx documentation theme."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"},
+    {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"},
+]
+
+[package.dependencies]
+beautifulsoup4 = "*"
+pygments = ">=2.7"
+sphinx = ">=6.0,<9.0"
+sphinx-basic-ng = ">=1.0.0.beta2"
+
+[[package]]
+name = "gcsfs"
+version = "2023.12.2.post1"
+description = "Convenient Filesystem interface over GCS"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "gcsfs-2023.12.2.post1-py2.py3-none-any.whl", hash = "sha256:4123cee2c44118d4c0c0f7405abe7610dd2d87087857520c6a7769765ec51d43"},
+    {file = "gcsfs-2023.12.2.post1.tar.gz", hash = "sha256:e38b7e59580a1e490d62d55a47cba33b49a941b01917c3d6f6cfd2563371ab7b"},
+]
+
+[package.dependencies]
+aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1"
+decorator = ">4.1.2"
+fsspec = "2023.12.2"
+google-auth = ">=1.2"
+google-auth-oauthlib = "*"
+google-cloud-storage = "*"
+requests = "*"
+
+[package.extras]
+crc = ["crcmod"]
+gcsfuse = ["fusepy"]
+
+[[package]]
+name = "google-api-core"
+version = "2.23.0"
+description = "Google API client core library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "google_api_core-2.23.0-py3-none-any.whl", hash = "sha256:c20100d4c4c41070cf365f1d8ddf5365915291b5eb11b83829fbd1c999b5122f"},
+    {file = "google_api_core-2.23.0.tar.gz", hash = "sha256:2ceb087315e6af43f256704b871d99326b1f12a9d6ce99beaedec99ba26a0ace"},
+]
+
+[package.dependencies]
+google-auth = ">=2.14.1,<3.0.dev0"
+googleapis-common-protos = ">=1.56.2,<2.0.dev0"
+proto-plus = ">=1.22.3,<2.0.0dev"
+protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+requests = ">=2.18.0,<3.0.0.dev0"
+
+[package.extras]
+async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"]
+grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
+grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+
+[[package]]
+name = "google-auth"
+version = "2.36.0"
+description = "Google Authentication Library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "google_auth-2.36.0-py2.py3-none-any.whl", hash = "sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb"},
+    {file = "google_auth-2.36.0.tar.gz", hash = "sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1"},
+]
+
+[package.dependencies]
+cachetools = ">=2.0.0,<6.0"
+pyasn1-modules = ">=0.2.1"
+rsa = ">=3.1.4,<5"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
+enterprise-cert = ["cryptography", "pyopenssl"]
+pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
+reauth = ["pyu2f (>=0.1.5)"]
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
+
+[[package]]
+name = "google-auth-oauthlib"
+version = "1.2.1"
+description = "Google Authentication Library"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f"},
+    {file = "google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263"},
+]
+
+[package.dependencies]
+google-auth = ">=2.15.0"
+requests-oauthlib = ">=0.7.0"
+
+[package.extras]
+tool = ["click (>=6.0.0)"]
+
+[[package]]
+name = "google-cloud-core"
+version = "2.4.1"
+description = "Google Cloud API client core library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"},
+    {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"},
+]
+
+[package.dependencies]
+google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev"
+google-auth = ">=1.25.0,<3.0dev"
+
+[package.extras]
+grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
+
+[[package]]
+name = "google-cloud-storage"
+version = "2.19.0"
+description = "Google Cloud Storage API client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"},
+    {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"},
+]
+
+[package.dependencies]
+google-api-core = ">=2.15.0,<3.0.0dev"
+google-auth = ">=2.26.1,<3.0dev"
+google-cloud-core = ">=2.3.0,<3.0dev"
+google-crc32c = ">=1.0,<2.0dev"
+google-resumable-media = ">=2.7.2"
+requests = ">=2.18.0,<3.0.0dev"
+
+[package.extras]
+protobuf = ["protobuf (<6.0.0dev)"]
+tracing = ["opentelemetry-api (>=1.1.0)"]
+
+[[package]]
+name = "google-crc32c"
+version = "1.6.0"
+description = "A python wrapper of the C library 'Google CRC32C'"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"},
+    {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"},
+    {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7"},
+    {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e"},
+    {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc"},
+    {file = "google_crc32c-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42"},
+    {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4"},
+    {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8"},
+    {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d"},
+    {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f"},
+    {file = "google_crc32c-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3"},
+    {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d"},
+    {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b"},
+    {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00"},
+    {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3"},
+    {file = "google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57"},
+    {file = "google_crc32c-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c"},
+    {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc"},
+    {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d"},
+    {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24"},
+    {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d"},
+    {file = "google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc"},
+]
+
+[package.extras]
+testing = ["pytest"]
+
+[[package]]
+name = "google-resumable-media"
+version = "2.7.2"
+description = "Utilities for Google Media Downloads and Resumable Uploads"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"},
+    {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"},
+]
+
+[package.dependencies]
+google-crc32c = ">=1.0,<2.0dev"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"]
+requests = ["requests (>=2.18.0,<3.0.0dev)"]
+
+[[package]]
+name = "googleapis-common-protos"
+version = "1.66.0"
+description = "Common protobufs used in Google APIs"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"},
+    {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"},
+]
+
+[package.dependencies]
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
+]
+
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+    {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.5.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+    {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
+]
+
+[package.dependencies]
+zipp = ">=3.20"
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+perf = ["ipython"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+    {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+    {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+    {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
+]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.4.2"
+description = "Collection of plugins for markdown-it-py"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"},
+    {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=1.0.0,<4.0.0"
+
+[package.extras]
+code-style = ["pre-commit"]
+rtd = ["myst-parser", "sphinx-book-theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+    {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "mock"
+version = "5.1.0"
+description = "Rolling backport of unittest.mock for all Pythons"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"},
+    {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"},
+]
+
+[package.extras]
+build = ["blurb", "twine", "wheel"]
+docs = ["sphinx"]
+test = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "multidict"
+version = "6.1.0"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
+    {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
+    {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"},
+    {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"},
+    {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"},
+    {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"},
+    {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"},
+    {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"},
+    {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"},
+    {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"},
+    {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"},
+    {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"},
+    {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"},
+    {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"},
+    {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"},
+    {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"},
+    {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
+
+[[package]]
+name = "mypy"
+version = "1.13.0"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
+    {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
+    {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
+    {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
+    {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
+    {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
+    {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
+    {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
+    {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
+    {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
+    {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
+    {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
+    {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
+    {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
+    {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
+    {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
+    {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
+    {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
+    {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
+    {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
+    {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
+    {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
+    {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
+    {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
+    {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
+    {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
+    {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
+    {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
+    {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
+    {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
+    {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
+    {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+faster-cache = ["orjson"]
+install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "myst-parser"
+version = "2.0.0"
+description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser,"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"},
+    {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"},
+]
+
+[package.dependencies]
+docutils = ">=0.16,<0.21"
+jinja2 = "*"
+markdown-it-py = ">=3.0,<4.0"
+mdit-py-plugins = ">=0.4,<1.0"
+pyyaml = "*"
+sphinx = ">=6,<8"
+
+[package.extras]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+linkify = ["linkify-it-py (>=2.0,<3.0)"]
+rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"]
+testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"]
+testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"]
+
+[[package]]
+name = "nox"
+version = "2024.10.9"
+description = "Flexible test automation."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "nox-2024.10.9-py3-none-any.whl", hash = "sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab"},
+    {file = "nox-2024.10.9.tar.gz", hash = "sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95"},
+]
+
+[package.dependencies]
+argcomplete = ">=1.9.4,<4"
+colorlog = ">=2.6.1,<7"
+packaging = ">=20.9"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
+virtualenv = ">=20.14.1"
+
+[package.extras]
+tox-to-nox = ["jinja2", "tox"]
+uv = ["uv (>=0.1.6)"]
+
+[[package]]
+name = "oauthlib"
+version = "3.2.2"
+description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
+    {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
+]
+
+[package.extras]
+rsa = ["cryptography (>=3.0.0)"]
+signals = ["blinker (>=1.4.0)"]
+signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+
+[[package]]
+name = "packaging"
+version = "21.3"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+
+[package.dependencies]
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+    {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
+]
+
+[package.extras]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "propcache"
+version = "0.2.1"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"},
+    {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"},
+    {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"},
+    {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"},
+    {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"},
+    {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"},
+    {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"},
+    {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"},
+    {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"},
+    {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"},
+    {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"},
+    {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"},
+    {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"},
+    {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"},
+    {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"},
+]
+
+[[package]]
+name = "proto-plus"
+version = "1.25.0"
+description = "Beautiful, Pythonic protocol buffers."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"},
+    {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19.0,<6.0.0dev"
+
+[package.extras]
+testing = ["google-api-core (>=1.31.5)"]
+
+[[package]]
+name = "protobuf"
+version = "5.29.1"
+description = ""
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"},
+    {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"},
+    {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"},
+    {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"},
+    {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"},
+    {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"},
+    {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"},
+    {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"},
+    {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"},
+    {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"},
+    {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"},
+]
+
+[[package]]
+name = "pyasn1"
+version = "0.6.1"
+description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
+    {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.4.1"
+description = "A collection of ASN.1-based protocols modules"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
+    {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6,<0.7.0"
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pyparsing"
+version = "3.2.0"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"},
+    {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"},
+]
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
+[[package]]
+name = "pytest"
+version = "7.4.4"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
+    {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.0"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
+    {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+    {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+    {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+    {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+    {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+    {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+    {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+    {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+    {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+    {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+    {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+    {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+    {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+    {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+    {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+    {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+    {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+    {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+    {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+    {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+    {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+    {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+    {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+    {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+    {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-oauthlib"
+version = "2.0.0"
+description = "OAuthlib authentication support for Requests."
+optional = false
+python-versions = ">=3.4"
+files = [
+    {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
+    {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
+]
+
+[package.dependencies]
+oauthlib = ">=3.0.0"
+requests = ">=2.0.0"
+
+[package.extras]
+rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+
+[[package]]
+name = "rsa"
+version = "4.9"
+description = "Pure-Python RSA implementation"
+optional = false
+python-versions = ">=3.6,<4"
+files = [
+    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.1.3"
+
+[[package]]
+name = "ruamel-yaml"
+version = "0.18.6"
+description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"},
+    {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"},
+]
+
+[package.dependencies]
+"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""}
+
+[package.extras]
+docs = ["mercurial (>5.7)", "ryd"]
+jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
+
+[[package]]
+name = "ruamel-yaml-clib"
+version = "0.2.12"
+description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
+    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
+    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
+    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
+    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
+    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
+    {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.1.5"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"},
+    {file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"},
+    {file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"},
+    {file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"},
+    {file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"},
+    {file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"},
+    {file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"},
+    {file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"},
+    {file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"},
+    {file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"},
+    {file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"},
+]
+
+[[package]]
+name = "safety"
+version = "2.3.5"
+description = "Checks installed dependencies for known vulnerabilities and licenses."
+optional = false
+python-versions = "*"
+files = [
+    {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"},
+    {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"},
+]
+
+[package.dependencies]
+Click = ">=8.0.2"
+dparse = ">=0.6.2"
+packaging = ">=21.0,<22.0"
+requests = "*"
+"ruamel.yaml" = ">=0.17.21"
+setuptools = ">=19.3"
+
+[package.extras]
+github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"]
+gitlab = ["python-gitlab (>=1.3.0)"]
+
+[[package]]
+name = "setuptools"
+version = "75.6.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"},
+    {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"},
+]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"]
+core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+optional = false
+python-versions = "*"
+files = [
+    {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+    {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.6"
+description = "A modern CSS selector implementation for Beautiful Soup."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
+    {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
+]
+
+[[package]]
+name = "sphinx"
+version = "7.3.7"
+description = "Python documentation generator"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"},
+    {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"},
+]
+
+[package.dependencies]
+alabaster = ">=0.7.14,<0.8.0"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.18.1,<0.22"
+imagesize = ">=1.3"
+importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.14"
+requests = ">=2.25.0"
+snowballstemmer = ">=2.0"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = ">=2.0.0"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = ">=1.1.9"
+tomli = {version = ">=2", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"]
+test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"]
+
+[[package]]
+name = "sphinx-autobuild"
+version = "2024.10.3"
+description = "Rebuild Sphinx documentation on changes, with hot reloading in the browser."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinx_autobuild-2024.10.3-py3-none-any.whl", hash = "sha256:158e16c36f9d633e613c9aaf81c19b0fc458ca78b112533b20dafcda430d60fa"},
+    {file = "sphinx_autobuild-2024.10.3.tar.gz", hash = "sha256:248150f8f333e825107b6d4b86113ab28fa51750e5f9ae63b59dc339be951fb1"},
+]
+
+[package.dependencies]
+colorama = ">=0.4.6"
+sphinx = "*"
+starlette = ">=0.35"
+uvicorn = ">=0.25"
+watchfiles = ">=0.20"
+websockets = ">=11"
+
+[package.extras]
+test = ["httpx", "pytest (>=6)"]
+
+[[package]]
+name = "sphinx-basic-ng"
+version = "1.0.0b2"
+description = "A modern skeleton for Sphinx themes."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"},
+    {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"},
+]
+
+[package.dependencies]
+sphinx = ">=4.0"
+
+[package.extras]
+docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"]
+
+[[package]]
+name = "sphinx-click"
+version = "5.2.1"
+description = "Sphinx extension that automatically documents click applications"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "sphinx_click-5.2.1-py3-none-any.whl", hash = "sha256:7aaa97bdce6fe315b3e719c085cd566e75470cbd91f1f6998b91213a09b8e7ec"},
+    {file = "sphinx_click-5.2.1.tar.gz", hash = "sha256:a669773f76db05a3e04c163da91cd5e7db1a4761a6ea9eb074404a393094cde7"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+docutils = "*"
+sphinx = ">=2.0"
+
+[[package]]
+name = "sphinx-copybutton"
+version = "0.5.2"
+description = "Add a copy button to each of your code cells."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"},
+    {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"},
+]
+
+[package.dependencies]
+sphinx = ">=1.8"
+
+[package.extras]
+code-style = ["pre-commit (==2.12.1)"]
+rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "2.0.0"
+description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"},
+    {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"},
+]
+
+[package.extras]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "2.0.0"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"},
+    {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"},
+]
+
+[package.extras]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.1.0"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"},
+    {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"},
+]
+
+[package.extras]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
+test = ["html5lib", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+    {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[package.extras]
+test = ["flake8", "mypy", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "2.0.0"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"},
+    {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"},
+]
+
+[package.extras]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
+test = ["defusedxml (>=0.7.1)", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "2.0.0"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"},
+    {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"},
+]
+
+[package.extras]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "starlette"
+version = "0.41.3"
+description = "The little ASGI library that shines."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"},
+    {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"},
+]
+
+[package.dependencies]
+anyio = ">=3.4.0,<5"
+typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
+
+[[package]]
+name = "tomli"
+version = "2.2.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
+    {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
+    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
+    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
+    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
+    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
+    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
+    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
+    {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
+    {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
+    {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
+    {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
+    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
+    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
+    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
+    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
+    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
+    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
+    {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
+    {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
+    {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
+    {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
+    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
+    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
+    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
+    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
+    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
+    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
+    {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
+    {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
+    {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
+    {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
+]
+
+[[package]]
+name = "typeguard"
+version = "4.4.1"
+description = "Run-time type checker for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"},
+    {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
+typing-extensions = ">=4.10.0"
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"]
+test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+    {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uvicorn"
+version = "0.32.1"
+description = "The lightning-fast ASGI server."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"},
+    {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+h11 = ">=0.8"
+typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.28.0"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"},
+    {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<5"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "watchfiles"
+version = "1.0.0"
+description = "Simple, modern and high performance file watching and code reload in python."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "watchfiles-1.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1d19df28f99d6a81730658fbeb3ade8565ff687f95acb59665f11502b441be5f"},
+    {file = "watchfiles-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:28babb38cf2da8e170b706c4b84aa7e4528a6fa4f3ee55d7a0866456a1662041"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ab123135b2f42517f04e720526d41448667ae8249e651385afb5cda31fedc0"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13a4f9ee0cd25682679eea5c14fc629e2eaa79aab74d963bc4e21f43b8ea1877"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e1d9284cc84de7855fcf83472e51d32daf6f6cecd094160192628bc3fee1b78"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ee5edc939f53466b329bbf2e58333a5461e6c7b50c980fa6117439e2c18b42d"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dccfc70480087567720e4e36ec381bba1ed68d7e5f368fe40c93b3b1eba0105"},
+    {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c83a6d33a9eda0af6a7470240d1af487807adc269704fe76a4972dd982d16236"},
+    {file = "watchfiles-1.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:905f69aad276639eff3893759a07d44ea99560e67a1cf46ff389cd62f88872a2"},
+    {file = "watchfiles-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09551237645d6bff3972592f2aa5424df9290e7a2e15d63c5f47c48cde585935"},
+    {file = "watchfiles-1.0.0-cp310-none-win32.whl", hash = "sha256:d2b39aa8edd9e5f56f99a2a2740a251dc58515398e9ed5a4b3e5ff2827060755"},
+    {file = "watchfiles-1.0.0-cp310-none-win_amd64.whl", hash = "sha256:2de52b499e1ab037f1a87cb8ebcb04a819bf087b1015a4cf6dcf8af3c2a2613e"},
+    {file = "watchfiles-1.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fbd0ab7a9943bbddb87cbc2bf2f09317e74c77dc55b1f5657f81d04666c25269"},
+    {file = "watchfiles-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:774ef36b16b7198669ce655d4f75b4c3d370e7f1cbdfb997fb10ee98717e2058"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b4fb98100267e6a5ebaff6aaa5d20aea20240584647470be39fe4823012ac96"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0fc3bf0effa2d8075b70badfdd7fb839d7aa9cea650d17886982840d71fdeabf"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:648e2b6db53eca6ef31245805cd528a16f56fa4cc15aeec97795eaf713c11435"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa13d604fcb9417ae5f2e3de676e66aa97427d888e83662ad205bed35a313176"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:936f362e7ff28311b16f0b97ec51e8f2cc451763a3264640c6ed40fb252d1ee4"},
+    {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245fab124b9faf58430da547512d91734858df13f2ddd48ecfa5e493455ffccb"},
+    {file = "watchfiles-1.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ff9c7e84e8b644a8f985c42bcc81457240316f900fc72769aaedec9d088055a"},
+    {file = "watchfiles-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c9a8d8fd97defe935ef8dd53d562e68942ad65067cd1c54d6ed8a088b1d931d"},
+    {file = "watchfiles-1.0.0-cp311-none-win32.whl", hash = "sha256:a0abf173975eb9dd17bb14c191ee79999e650997cc644562f91df06060610e62"},
+    {file = "watchfiles-1.0.0-cp311-none-win_amd64.whl", hash = "sha256:2a825ba4b32c214e3855b536eb1a1f7b006511d8e64b8215aac06eb680642d84"},
+    {file = "watchfiles-1.0.0-cp311-none-win_arm64.whl", hash = "sha256:a5a7a06cfc65e34fd0a765a7623c5ba14707a0870703888e51d3d67107589817"},
+    {file = "watchfiles-1.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:28fb64b5843d94e2c2483f7b024a1280662a44409bedee8f2f51439767e2d107"},
+    {file = "watchfiles-1.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e3750434c83b61abb3163b49c64b04180b85b4dabb29a294513faec57f2ffdb7"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bedf84835069f51c7b026b3ca04e2e747ea8ed0a77c72006172c72d28c9f69fc"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90004553be36427c3d06ec75b804233f8f816374165d5225b93abd94ba6e7234"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b46e15c34d4e401e976d6949ad3a74d244600d5c4b88c827a3fdf18691a46359"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:487d15927f1b0bd24e7df921913399bb1ab94424c386bea8b267754d698f8f0e"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ff236d7a3f4b0a42f699a22fc374ba526bc55048a70cbb299661158e1bb5e1f"},
+    {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c01446626574561756067f00b37e6b09c8622b0fc1e9fdbc7cbcea328d4e514"},
+    {file = "watchfiles-1.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b551c465a59596f3d08170bd7e1c532c7260dd90ed8135778038e13c5d48aa81"},
+    {file = "watchfiles-1.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1ed613ee107269f66c2df631ec0fc8efddacface85314d392a4131abe299f00"},
+    {file = "watchfiles-1.0.0-cp312-none-win32.whl", hash = "sha256:5f75cd42e7e2254117cf37ff0e68c5b3f36c14543756b2da621408349bd9ca7c"},
+    {file = "watchfiles-1.0.0-cp312-none-win_amd64.whl", hash = "sha256:cf517701a4a872417f4e02a136e929537743461f9ec6cdb8184d9a04f4843545"},
+    {file = "watchfiles-1.0.0-cp312-none-win_arm64.whl", hash = "sha256:8a2127cd68950787ee36753e6d401c8ea368f73beaeb8e54df5516a06d1ecd82"},
+    {file = "watchfiles-1.0.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:95de85c254f7fe8cbdf104731f7f87f7f73ae229493bebca3722583160e6b152"},
+    {file = "watchfiles-1.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:533a7cbfe700e09780bb31c06189e39c65f06c7f447326fee707fd02f9a6e945"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2218e78e2c6c07b1634a550095ac2a429026b2d5cbcd49a594f893f2bb8c936"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9122b8fdadc5b341315d255ab51d04893f417df4e6c1743b0aac8bf34e96e025"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9272fdbc0e9870dac3b505bce1466d386b4d8d6d2bacf405e603108d50446940"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3b33c3aefe9067ebd87846806cd5fc0b017ab70d628aaff077ab9abf4d06b3"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc338ce9f8846543d428260fa0f9a716626963148edc937d71055d01d81e1525"},
+    {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac778a460ea22d63c7e6fb0bc0f5b16780ff0b128f7f06e57aaec63bd339285"},
+    {file = "watchfiles-1.0.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53ae447f06f8f29f5ab40140f19abdab822387a7c426a369eb42184b021e97eb"},
+    {file = "watchfiles-1.0.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1f73c2147a453315d672c1ad907abe6d40324e34a185b51e15624bc793f93cc6"},
+    {file = "watchfiles-1.0.0-cp313-none-win32.whl", hash = "sha256:eba98901a2eab909dbd79681190b9049acc650f6111fde1845484a4450761e98"},
+    {file = "watchfiles-1.0.0-cp313-none-win_amd64.whl", hash = "sha256:d562a6114ddafb09c33246c6ace7effa71ca4b6a2324a47f4b09b6445ea78941"},
+    {file = "watchfiles-1.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3d94fd83ed54266d789f287472269c0def9120a2022674990bd24ad989ebd7a0"},
+    {file = "watchfiles-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48051d1c504448b2fcda71c5e6e3610ae45de6a0b8f5a43b961f250be4bdf5a8"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29cf884ad4285d23453c702ed03d689f9c0e865e3c85d20846d800d4787de00f"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d3572d4c34c4e9c33d25b3da47d9570d5122f8433b9ac6519dca49c2740d23cd"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c2696611182c85eb0e755b62b456f48debff484b7306b56f05478b843ca8ece"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:550109001920a993a4383b57229c717fa73627d2a4e8fcb7ed33c7f1cddb0c85"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b555a93c15bd2c71081922be746291d776d47521a00703163e5fbe6d2a402399"},
+    {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:947ccba18a38b85c366dafeac8df2f6176342d5992ca240a9d62588b214d731f"},
+    {file = "watchfiles-1.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ffd98a299b0a74d1b704ef0ed959efb753e656a4e0425c14e46ae4c3cbdd2919"},
+    {file = "watchfiles-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f8c4f3a1210ed099a99e6a710df4ff2f8069411059ffe30fa5f9467ebed1256b"},
+    {file = "watchfiles-1.0.0-cp39-none-win32.whl", hash = "sha256:1e176b6b4119b3f369b2b4e003d53a226295ee862c0962e3afd5a1c15680b4e3"},
+    {file = "watchfiles-1.0.0-cp39-none-win_amd64.whl", hash = "sha256:2d9c0518fabf4a3f373b0a94bb9e4ea7a1df18dec45e26a4d182aa8918dee855"},
+    {file = "watchfiles-1.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f159ac795785cde4899e0afa539f4c723fb5dd336ce5605bc909d34edd00b79b"},
+    {file = "watchfiles-1.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c3d258d78341d5d54c0c804a5b7faa66cd30ba50b2756a7161db07ce15363b8d"},
+    {file = "watchfiles-1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbd0311588c2de7f9ea5cf3922ccacfd0ec0c1922870a2be503cc7df1ca8be7"},
+    {file = "watchfiles-1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a13ac46b545a7d0d50f7641eefe47d1597e7d1783a5d89e09d080e6dff44b0"},
+    {file = "watchfiles-1.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2bca898c1dc073912d3db7fa6926cc08be9575add9e84872de2c99c688bac4e"},
+    {file = "watchfiles-1.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:06d828fe2adc4ac8a64b875ca908b892a3603d596d43e18f7948f3fef5fc671c"},
+    {file = "watchfiles-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:074c7618cd6c807dc4eaa0982b4a9d3f8051cd0b72793511848fd64630174b17"},
+    {file = "watchfiles-1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95dc785bc284552d044e561b8f4fe26d01ab5ca40d35852a6572d542adfeb4bc"},
+    {file = "watchfiles-1.0.0.tar.gz", hash = "sha256:37566c844c9ce3b5deb964fe1a23378e575e74b114618d211fbda8f59d7b5dab"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0"
+
+[[package]]
+name = "websockets"
+version = "14.1"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29"},
+    {file = "websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179"},
+    {file = "websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250"},
+    {file = "websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0"},
+    {file = "websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0"},
+    {file = "websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199"},
+    {file = "websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58"},
+    {file = "websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078"},
+    {file = "websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434"},
+    {file = "websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10"},
+    {file = "websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e"},
+    {file = "websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512"},
+    {file = "websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac"},
+    {file = "websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280"},
+    {file = "websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1"},
+    {file = "websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3"},
+    {file = "websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6"},
+    {file = "websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0"},
+    {file = "websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89"},
+    {file = "websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23"},
+    {file = "websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e"},
+    {file = "websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09"},
+    {file = "websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed"},
+    {file = "websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d"},
+    {file = "websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707"},
+    {file = "websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a"},
+    {file = "websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45"},
+    {file = "websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58"},
+    {file = "websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058"},
+    {file = "websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4"},
+    {file = "websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05"},
+    {file = "websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0"},
+    {file = "websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f"},
+    {file = "websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9"},
+    {file = "websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b"},
+    {file = "websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3"},
+    {file = "websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59"},
+    {file = "websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2"},
+    {file = "websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da"},
+    {file = "websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9"},
+    {file = "websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7"},
+    {file = "websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a"},
+    {file = "websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6"},
+    {file = "websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0"},
+    {file = "websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a"},
+    {file = "websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6"},
+    {file = "websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56"},
+    {file = "websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c"},
+    {file = "websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b"},
+    {file = "websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78"},
+    {file = "websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735"},
+    {file = "websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a"},
+    {file = "websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc"},
+    {file = "websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4"},
+    {file = "websockets-14.1-cp39-cp39-win_amd64.whl", hash = "sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7"},
+    {file = "websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1"},
+    {file = "websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5"},
+    {file = "websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e"},
+    {file = "websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8"},
+]
+
+[[package]]
+name = "xdoctest"
+version = "1.2.0"
+description = "A rewrite of the builtin doctest module"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "xdoctest-1.2.0-py3-none-any.whl", hash = "sha256:0f1ecf5939a687bd1fc8deefbff1743c65419cce26dff908f8b84c93fbe486bc"},
+    {file = "xdoctest-1.2.0.tar.gz", hash = "sha256:d8cfca6d8991e488d33f756e600d35b9fdf5efd5c3a249d644efcbbbd2ed5863"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.4.1", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""}
+Pygments = {version = ">=2.4.1", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""}
+
+[package.extras]
+all = ["IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)"]
+all-strict = ["IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)"]
+colors = ["Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "colorama (>=0.4.1)"]
+colors-strict = ["Pygments (==2.0.0)", "Pygments (==2.4.1)", "colorama (==0.4.1)"]
+docs = ["Pygments (>=2.9.0)", "myst-parser (>=0.18.0)", "sphinx (>=5.0.1)", "sphinx-autoapi (>=1.8.4)", "sphinx-autobuild (>=2021.3.14)", "sphinx-reredirects (>=0.0.1)", "sphinx-rtd-theme (>=1.0.0)", "sphinxcontrib-napoleon (>=0.7)"]
+docs-strict = ["Pygments (==2.9.0)", "myst-parser (==0.18.0)", "sphinx (==5.0.1)", "sphinx-autoapi (==1.8.4)", "sphinx-autobuild (==2021.3.14)", "sphinx-reredirects (==0.0.1)", "sphinx-rtd-theme (==1.0.0)", "sphinxcontrib-napoleon (==0.7)"]
+jupyter = ["IPython (>=7.23.1)", "attrs (>=19.2.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)"]
+jupyter-strict = ["IPython (==7.23.1)", "attrs (==19.2.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)"]
+optional = ["IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"]
+optional-strict = ["IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
+tests = ["pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)"]
+tests-binary = ["cmake (>=3.21.2)", "cmake (>=3.25.0)", "ninja (>=1.10.2)", "ninja (>=1.11.1)", "pybind11 (>=2.10.3)", "pybind11 (>=2.7.1)", "scikit-build (>=0.11.1)", "scikit-build (>=0.16.1)"]
+tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"]
+tests-strict = ["pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)"]
+
+[[package]]
+name = "yarl"
+version = "1.18.3"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"},
+    {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"},
+    {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"},
+    {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"},
+    {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"},
+    {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"},
+    {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"},
+    {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"},
+    {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"},
+    {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"},
+    {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"},
+    {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"},
+    {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"},
+    {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"},
+    {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+propcache = ">=0.2.0"
+
+[[package]]
+name = "zipp"
+version = "3.21.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
+]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">=3.9,<3.12"
+content-hash = "80fd8b9cad8adf845ec71c7f812aa275c59688117bf7699087a38aef422998e9"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..858cbf1
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,138 @@
+[tool.poetry]
+name = "sdfs"
+version = "0.1.0"
+description = "OSDU Seismic Store fsspec filesystem implementation"
+authors = [
+    "Rostislav_Dublin <rostislav_dublin@epam.com>",
+    "Vadzim_Kulyba <vadzim_kulyba@epam.com>",
+]
+readme = "README.md"
+repository = "https://gitbud.epam.com/epm-osdu/epam-osdu/tgs-mdio/sdfs"
+documentation = "https://gitbud.epam.com/epm-osdu/epam-osdu/tgs-mdio/sdfs/-/blob/main/README.md"
+classifiers = [
+    "Development Status :: 4 - Beta",
+]
+packages = [
+    { include = "sdfs", from = "src" },
+]
+keywords = ["sdfs", "osdu", "sdpath", "seismic", "mdio", "fsspec"]
+
+[tool.poetry.dependencies]
+python = ">=3.9,<3.12"
+typing-extensions = "^4.8.0"
+fsspec = "^2023.9.2"
+gcsfs  = "^2023.9.2"
+nox = "^2024.10.9"
+
+[tool.poetry.group.dev.dependencies]
+coverage = {version = "^7.3.0", extras = ["toml"]}
+ruff = "0.1.5"
+furo = ">=2023.9.10"
+mock = "^5.1.0"
+mypy = "^1.5.1"
+pytest = "^7.4.0"
+safety = "^2.3.5"
+sphinx-autobuild = ">=2021.3.14"
+sphinx-click = "^5.0.1"
+sphinx-copybutton = "^0.5.2"
+typeguard = "^4.1.2"
+xdoctest = {version = "^1.1.1", extras = ["colors"]}
+myst-parser = "^2.0.0"
+Pygments = "^2.16.1"
+Sphinx = "^7.2.3"
+python-dotenv = "1.0.0"
+
+[tool.poetry.plugins."fsspec.specs"]
+"sd" = "sdfs.core:SDFileSystem"
+
+[tool.coverage.paths]
+source = ["src", "*/site-packages"]
+tests = ["tests", "*/tests"]
+
+[tool.coverage.run]
+branch = true
+source = ["sdfs", "tests"]
+
+[tool.coverage.report]
+show_missing = true
+fail_under = 80
+
+[tool.mypy]
+strict = true
+warn_unreachable = true
+pretty = true
+show_column_numbers = true
+show_error_codes = true
+show_error_context = true
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.ruff]
+select = [
+    "E",  # pycodestyle
+    "F",   # pyflakes
+    "B",  # bugbear
+    "I",  # isort
+    "UP",  # pyupgrade
+    "N",  # pep8-naming
+    "D",   # pydocstyle
+    "ANN",  # annotations
+    "S",  # bandit
+    "A",  # builtins
+    "C4",  # comprehensions
+    "DTZ",  # datetimez
+    "EM",  # errmsg
+    "ICN",  # import-conventions
+    "PIE",  # pie
+    "PT",  # pytest-style
+    "RSE",  # raise
+    "RET",  # return
+    "SIM",  # simplify
+    "TID",  # tidy-imports
+    "TCH",  # type-checking
+    "ARG",  # unused-arguments
+    "PTH",  # use-pathlib
+    "TD",  # todos
+    "PL",  # pylint
+    "FLY", # flynt
+    "NPY",  # numpy
+]
+ignore = [
+    "ANN101",  # Missing type annotation for `self`
+]
+line-length = 88
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.per-file-ignores]
+"src/sdfs/core.py" = [
+    # can't change args count
+    "PLR0913",
+    # for cloud paths, plain os.path joining is preferable to pathlib
+    "PTH118"
+]
+"tests/**/*.py" = [
+    # allow assert statements in tests
+    "S101", "PT019"
+]
+"tests/test_retry_flow.py" = [
+    # disable multiline assertion
+    "PT018"
+]
+"tests/integration/test_sd_file_system_integration.py" = [
+    # checking file counts and sizes
+    "PLR2004",
+    # for long test cases
+    "PLR0915"
+]
+
+"tests/integration/test_single_file_sd_integration.py" = [
+    # checking file counts and sizes
+    "PLR2004",
+    # for long test cases
+    "PLR0915",
+    "E501"
+]
diff --git a/src/sdfs/__init__.py b/src/sdfs/__init__.py
new file mode 100644
index 0000000..8cd3781
--- /dev/null
+++ b/src/sdfs/__init__.py
@@ -0,0 +1,33 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""SDFS library."""
+
+
+from importlib import metadata
+
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+from sdfs.core import SDFileSystem
+
+__all__ = ["SDFileSystem", "SeismicDmsClient"]
+
+
+try:
+    # Resolve the installed distribution's version; fall back to a sentinel
+    # when the package is not installed (e.g. running from a source checkout).
+    __version__ = metadata.version("sdfs")
+except metadata.PackageNotFoundError:
+    __version__ = "unknown"
+
diff --git a/src/sdfs/clients/__init__.py b/src/sdfs/clients/__init__.py
new file mode 100644
index 0000000..a6f7356
--- /dev/null
+++ b/src/sdfs/clients/__init__.py
@@ -0,0 +1,17 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
diff --git a/src/sdfs/clients/seismic_dms_client.py b/src/sdfs/clients/seismic_dms_client.py
new file mode 100644
index 0000000..6a813f9
--- /dev/null
+++ b/src/sdfs/clients/seismic_dms_client.py
@@ -0,0 +1,474 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""OSDU Seismic DMS client."""
+import json
+import logging
+from typing import Any, Callable, Optional
+
+import requests
+from decorator import decorator
+
+from sdfs.exceptions import (
+    AuthStrategyError,
+    CredentialsError,
+    DatasetBoundariesError,
+    ValidationError,
+)
+from sdfs.providers.factory import ProviderSpecificFactory
+from sdfs.utils.http_utils import UNAUTHORIZED_CODE, HttpMethod
+from sdfs.utils.validators import is_valid_url
+
+logger = logging.getLogger(__name__)
+
+
+@decorator
+def retry_request(func: Callable, retries: int = 3, *args: int, **kwargs: int) -> Any:  # noqa: ANN401
+    """Retrying OSDU refresh token.
+
+    Args:
+        func (Callable): osdu req handler func.
+        retries (int): number of retries. Defaults to 3.
+        *args: args.
+        **kwargs: kwargs.
+
+    Raises:
+        e: out of retries on exception
+        e: retrying after exception
+
+    Returns:
+        Any: func result
+    """
+    for retry in range(retries):  # noqa: RET503
+        try:
+            return func(*args, **kwargs)
+        except requests.exceptions.HTTPError as e:
+            if retry == retries - 1:
+                logger.exception(f"{func.__name__} out of retries on exception: {e}")
+                raise e
+            if e.response.status_code == UNAUTHORIZED_CODE:
+                self: "SeismicDmsClient" = args[0]
+                if self._auth_strategy == "refresh_token":
+                    self.refresh_token()
+                    logger.debug(f"{func.__name__} retrying after exception: {e}")
+                    continue
+
+            logger.exception("Can't refresh OSDU Data Platform token", exc_info=False)
+            raise e
+
+
+class SeismicDmsClient:
+    """Seismic DMS Client designed to work with OSDU seismic store service.
+
+    - Getting information about sd path
+    - Getting downscoped credentials for accessing to dataset storage
+    """
+
+    SD_PATH_MIN_SLUG_COUNT = 3
+
+    WRITE_SLUG = "upload-connection-string"
+    READ_SLUG = "download-connection-string"
+    SD_PATH_PREFIX = "sd://"
+
+    def __init__(  # noqa: PLR0913
+        self,
+        sdpath: str,
+        seismic_dms_url: str,
+        access_token: Optional[str] = None,
+        refresh_url: Optional[str] = None,
+        refresh_token: Optional[str] = None,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        write_access: bool = False,
+    ) -> None:
+        """Initial service configuration and choosing service auth strategy.
+
+        Args:
+            sdpath (str): Path to a target seismic dataset on the OSDU Seismic Store
+            seismic_dms_url (str): OSDU Seismic DMS service URL
+            access_token (Optional[str]): oauth2 access token of the OSDU user
+            refresh_token (Optional[str]): oauth2 refresh token of the OSDU user
+            refresh_url (Optional[str]): oauth2 token refresh url of the OSDU user IdP
+            client_id (Optional[str]): oauth2 client application id
+            client_secret (Optional[str]): oauth2 client application secret
+            write_access (bool): If it is allowed for SDFS to modify data. Defaults to False
+
+        Raises:
+            ValidationError: SDpath arg is not provided
+            ValidationError: Invalid seismic service url
+            CredentialsError: No one of auth args is provided: access_token, refresh_token
+            CredentialsError: Both args are provided: access_token, refresh_token
+            AuthStrategyError: Invalid set of args for refresh_token auth strategy
+            ValidationError: Invalid refresh token url
+        """  # noqa: E501
+        self._session_statistics = {
+            "access_token_refreshes": 0,
+            "downscoped_credentials_updates": 0,
+        }
+
+        if not sdpath:
+            msg = "SDpath arg is not provided"
+            raise ValidationError(msg)
+
+        if not seismic_dms_url or not is_valid_url(seismic_dms_url):
+            msg = "Invalid seismic service url"
+            raise ValidationError(msg)
+
+        self._seismic_dms_url = seismic_dms_url
+
+        # auth strategy choosing
+        if not access_token and not refresh_token:
+            msg = "No one of auth args is provided: access_token, refresh_token"
+            raise CredentialsError(msg)
+
+        if access_token and refresh_token:
+            msg = "Both args are provided: access_token, refresh_token"
+            raise CredentialsError(msg)
+
+        self._sdpath = sdpath
+        self._split_sdpath()
+
+        if refresh_token:
+            self._auth_strategy = "refresh_token"
+
+            if not (refresh_token and refresh_url and client_id and client_secret):
+                msg = "Invalid set of args for refresh_token auth strategy"
+                raise AuthStrategyError(msg)
+
+            if not is_valid_url(refresh_url):
+                msg = "Invalid refresh token url"
+                raise ValidationError(msg)
+
+            self._refresh_token = refresh_token
+
+            self._refresh_url = refresh_url
+
+            self._client_id = client_id
+            self._client_secret = client_secret
+
+            self.refresh_token()
+        else:
+            self._auth_strategy = "access_token"
+            self._access_token = access_token
+
+        self._write_access = bool(write_access)
+
+        self._dataset_properties = None
+        self._downscoped_credentials = None
+
+        self.get_dataset_properties()
+
+        provider_specific_factory = ProviderSpecificFactory(self)
+        self._provider_specific = provider_specific_factory.get_provider_specific()
+
+    def _sdpath_validator(self) -> None:
+        """Validate SDpath structure.
+
+        Raises:
+            ValidationError: Invalid sdpath format
+        """
+        msg = "Invalid sdpath format"
+
+        if self._sdpath.startswith(self.SD_PATH_PREFIX):
+            cloud_path = self._sdpath.split(self.SD_PATH_PREFIX)[-1]
+            self._cloud_path_slugs = cloud_path.split("/")
+            if len(self._cloud_path_slugs) < self.SD_PATH_MIN_SLUG_COUNT:
+                raise ValidationError(msg)
+        else:
+            raise ValidationError(msg)
+
+    def _split_sdpath(self) -> None:
+        """Get SDpath atomic components."""
+        self._sdpath_validator()
+
+        self._path = self._sdpath.removeprefix("sd://")
+        self._sd_tenant = self._cloud_path_slugs.pop(0)
+        self._sd_subproject = self._cloud_path_slugs.pop(0)
+        self._sd_dataset_name = self._cloud_path_slugs.pop()
+        self._sd_subproject_path = (
+            "/".join(self._cloud_path_slugs) if len(self._cloud_path_slugs) else None
+        )
+
+    @property
+    def sdpath(self) -> str:
+        """Accessing the protected sdpath.
+
+        Returns:
+            str: public sdpath property
+        """
+        return self._sdpath
+
+    def refresh_token(self) -> str:
+        """Get refreshed OSDU access_token for the invoker OSDU user if absent or expired.
+
+        Raises:
+            AuthStrategyError: This auth strategy don't support this method
+
+        Returns:
+            str: OSDU access_token
+        """  # noqa: E501
+        if self._auth_strategy != "refresh_token":
+            msg = "This auth strategy don't support this method"
+            raise AuthStrategyError(msg)
+
+        headers = {"Content-Type": "application/x-www-form-urlencoded"}
+
+        auth_params = {
+            "grant_type": "refresh_token",
+            "refresh_token": self._refresh_token,
+            "client_id": self._client_id,
+            "client_secret": self._client_secret,
+        }
+
+        auth_refresh_payload = "&".join(
+            [f"{parameter}={auth_params[parameter]}" for parameter in auth_params]
+        )
+
+        response = requests.request(
+            method=HttpMethod.POST.value,
+            url=self._refresh_url,
+            headers=headers,
+            data=auth_refresh_payload,
+        )
+
+        if not response.ok:
+            logger.error(response.text)
+            response.raise_for_status()
+
+        response_data = response.json()
+        self._access_token = response_data["access_token"]
+        self._session_statistics["access_token_refreshes"] += 1
+
+        if response_data.get("refresh_token", None):
+            self._access_token = response_data["refresh_token"]
+
+        return self._access_token
+
+    def _get_auth_headers(self) -> dict:
+        """Prepare OSDU Data Platform auth headers for future requests.
+
+        Returns:
+            dict: osdu headers in pythonic dict format
+        """
+        return {
+            "data-partition-id": self._sd_tenant,
+            "Authorization": f"Bearer {self._access_token}",
+        }
+
+    @retry_request
+    def get_dataset_properties(self, force_request: bool = False) -> dict:
+        """Get seismic dataset metadata from OSDU Seismic DMS catalogue.
+
+        Args:
+            force_request: help to force run req for get dataset info.
+
+        Returns:
+            dict: _dataset_properties in pythonic dict format
+        """
+        if force_request or self._dataset_properties is None:
+            dataset_properties_url = (
+                f"{self._seismic_dms_url}/dataset{self.dataset_path}"
+            )
+            headers = {"Content-Type": "application/json", **self._get_auth_headers()}
+
+            response = requests.request(
+                method=HttpMethod.GET.value,
+                url=dataset_properties_url,
+                headers=headers,
+                data=None,
+            )
+
+            if not response.ok:
+                logger.error(response.text)
+                response.raise_for_status()
+
+            self._service_provider_code = response.headers["service-provider"]
+            self._dataset_properties = response.json()
+
+        return self._dataset_properties
+
+    @retry_request
+    def _register_dataset(self) -> None:
+        """Register OSDU Seismic DMS dataset."""
+        dataset_url = f"{self._seismic_dms_url}/dataset{self.dataset_path}"
+        headers = {"Content-Type": "application/json", **self._get_auth_headers()}
+
+        response = requests.request(
+            method=HttpMethod.POST.value,
+            url=dataset_url,
+            headers=headers,
+            data=None,
+        )
+
+        if not response.ok:
+            logger.error(response.text)
+            response.raise_for_status()
+
+        self._service_provider_code = response.headers["service-provider"]
+        self._dataset_properties = response.json()
+
+        return self._dataset_properties
+
+    @retry_request
+    def _deregister_dataset(self) -> None:
+        """Deregister OSDU Seismic DMS dataset."""
+        dataset_url = f"{self._seismic_dms_url}/dataset{self.dataset_path}"
+        headers = {"Content-Type": "application/json", **self._get_auth_headers()}
+
+        response = requests.request(
+            method=HttpMethod.DELETE.value,
+            url=dataset_url,
+            headers=headers,
+            data=None,
+        )
+
+        if not response.ok:
+            logger.error(response.text)
+            response.raise_for_status()
+
+    @property
+    def dataset_path(self) -> str:
+        """Prepare dataset path for seismic dms service.
+
+        Returns:
+            str: seismic dms dataset path
+        """
+        dms_dataset_path = [
+            f"/tenant/{self._sd_tenant}",
+            f"/subproject/{self._sd_subproject}",
+            f"/dataset/{self._sd_dataset_name}",
+            f"?path={self._sd_subproject_path}",
+        ]
+
+        if not self._sd_subproject_path:
+            dms_dataset_path.pop()
+
+        return "".join(dms_dataset_path)
+
+    def get_service_provider_code(self) -> str:
+        """Get OSDU CSP code.
+
+        Returns:
+            str: Service provider code
+        """
+        return self._service_provider_code
+
+    @retry_request
+    def get_downscoped_credentials(self, force_request: bool = False) -> dict:
+        """Get downscoped credentials for accessing the seismic dataset's storage.
+
+        Args:
+            force_request: help to force run req for refresh creds. Defaults to False.
+
+        Returns:
+            dict: _downscoped_credentials in pythonic dict format
+        """
+        if force_request or self._downscoped_credentials is None:
+            operation_slug = self.WRITE_SLUG if self._write_access else self.READ_SLUG
+            dataset_operation_url = f"{self._seismic_dms_url}/utility/{operation_slug}?sdpath={self._sdpath}"  # noqa: E501
+
+            headers = {"Content-Type": "application/json", **self._get_auth_headers()}
+
+            response = requests.request(
+                method=HttpMethod.GET.value,
+                url=dataset_operation_url,
+                headers=headers,
+                data=None,
+            )
+
+            if not response.ok:
+                logger.error(response.text)
+                response.raise_for_status()
+
+            self._downscoped_credentials = response.json()
+            self._session_statistics["downscoped_credentials_updates"] += 1
+
+        return self._downscoped_credentials
+
+    @property
+    def dataset_storage_root(self) -> str:
+        """Cloud native root dataset path.
+
+        Returns:
+            str: cloud native root path
+        """
+        return self._provider_specific.get_dataset_storage_root()
+
+    def get_dataset_storage_url(self, input_sdpath: str) -> str:
+        """Get dataset storage url by input sd path.
+
+        Args:
+            input_sdpath (str): any sdpath in this dataset
+
+        Raises:
+            DatasetBoundariesError: The sdpath is not in the dataset's boundaries
+
+        Returns:
+            str: cloud native object storage path.
+        """
+        if not input_sdpath or not input_sdpath.removeprefix("sd://").startswith(
+            self._sdpath.removeprefix("sd://")
+        ):
+
+            raise DatasetBoundariesError
+
+        result_path = input_sdpath.removeprefix("sd://").replace(
+            self._sdpath.removeprefix("sd://"), ""
+        )
+
+        return self._provider_specific.get_dataset_storage_url(result_path)
+
+    def get_storage_options(self) -> dict:
+        """Get cloud native storage options.
+
+        Returns:
+            dict: Pythonic dict with cloud native auth/access instructions for storage
+        """
+        return self._provider_specific.get_storage_options()
+
+    def get_dataset_metadata(self, force_request: bool = False) -> dict:
+        """Get dataset metadata info.
+
+        Args:
+            force_request (bool): help to force run req for refresh creds.
+
+        Returns:
+            file_metadata (dict): filemetadata in pythonic dict format
+        """
+        dataset_properties = self.get_dataset_properties(force_request)
+        return dataset_properties.get("filemetadata", {})
+
+    @retry_request
+    def patch_dataset_metadata(self, file_metadata: dict) -> None:
+        """Update dataset with new filemetadata values taken from the input.
+
+        Args:
+            file_metadata (dict): filemetadata in pythonic dict format
+        """
+        update_dataset_url = f"{self._seismic_dms_url}/dataset{self.dataset_path}"
+        headers = {"Content-Type": "application/json", **self._get_auth_headers()}
+
+        response = requests.request(
+            method=HttpMethod.PATCH.value,
+            url=update_dataset_url,
+            headers=headers,
+            data=json.dumps({"filemetadata": file_metadata}),
+        )
+
+        if not response.ok:
+            logger.error(response.text)
+            response.raise_for_status()
diff --git a/src/sdfs/core.py b/src/sdfs/core.py
new file mode 100644
index 0000000..3fb6e7e
--- /dev/null
+++ b/src/sdfs/core.py
@@ -0,0 +1,952 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""OSDU Seismic Store pythonic interface."""
+
+import io
+import logging
+import os
+from copy import deepcopy
+from datetime import datetime
+from functools import wraps
+from itertools import chain
+from typing import Any, Callable, Generator, List, Optional, Union
+
+from fsspec import AbstractFileSystem, callbacks
+from fsspec import core as fsspec_core
+from fsspec.transaction import Transaction
+from fsspec.utils import setup_logging
+from typing_extensions import override
+
+from sdfs import SeismicDmsClient
+from sdfs.utils.http_utils import UNAUTHORIZED_CODE
+
+logger = logging.getLogger()
+
+
+if "SDFS_DEBUG" in os.environ:
+    setup_logging(logger=logger, level=os.getenv("SDFS_DEBUG"))
+
+
+def auth_handler(retries: int = 3) -> Callable:  # noqa: C901
+    """Hard refreshing final file system.
+
+    Class decorator factory: wraps (nearly) every method of the decorated
+    class so that a 401-style failure triggers a credentials refresh and a
+    rebuild of the wrapped final filesystem before retrying.
+
+    Args:
+        retries (int): number of retries
+
+    Returns:
+        func (Callable): class decorator.
+    """
+
+    def wrapper(method: Callable) -> Callable:
+        @wraps(method)
+        def refresh_final_fs(*args: int, **kwargs: int) -> Any:  # noqa: ANN401
+            for retry in range(retries):  # noqa: RET503
+                try:
+                    return method(*args, **kwargs)
+                except Exception as e:
+                    if retry == retries - 1:
+                        logger.exception(
+                            f"sdfs: {method.__name__} out of retries on exception: {e}"
+                        )
+                        raise
+
+                    # Only an error carrying an UNAUTHORIZED code on a bound
+                    # method call can be repaired; everything else re-raises.
+                    if (
+                        hasattr(e, "code")
+                        and str(e.code) == str(UNAUTHORIZED_CODE)
+                        and len(args)
+                    ):
+                        # args[0] is `self` for instance methods.
+                        self: "SDFileSystem" = args[0]
+                        if hasattr(self, "_get_final_fs"):
+                            # Force-refresh downscoped credentials, then
+                            # rebuild the final filesystem with them.
+                            self._get_seismic_dms_client().get_downscoped_credentials(
+                                True
+                            )
+                            self._get_final_fs(True)
+                            logger.warning(
+                                "sdfs: Final file system successfully refreshed"
+                            )
+                            continue
+
+                    raise
+
+        return refresh_final_fs
+
+    def decorate(cls: "SDFileSystem") -> "SDFileSystem":
+        # NOTE(review): this wraps every callable in cls.__dict__ (including
+        # dunders such as __init__) except the two refresh helpers -- confirm
+        # that wrapping __init__ and other special methods is intended.
+        for attr in cls.__dict__:
+            if (
+                callable(getattr(cls, attr))
+                and attr != "_get_seismic_dms_client"
+                and attr != "_get_final_fs"
+            ):
+                setattr(cls, attr, wrapper(getattr(cls, attr)))
+        return cls
+
+    return decorate
+
+
+@auth_handler()
+class SDFileSystem(AbstractFileSystem):
+    """Connect to OSDU Seismic Store."""
+
+    protocol = "sd"
+    default_size = 1024
+
+    _final_fs: AbstractFileSystem = None
+    _seismic_dms_client: SeismicDmsClient = None
+
+    def __init__(
+        self,
+        *,
+        sdpath: str,
+        seismic_dms_url: str,
+        access_token: Optional[str] = None,
+        refresh_url: Optional[str] = None,
+        refresh_token: Optional[str] = None,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        write_access: bool = False,
+        single_file: bool = False
+    ) -> None:
+        """Init of OSDU SDFileSystem.
+
+        Args:
+            sdpath (str): Path to a target seismic dataset on the OSDU Seismic Store
+            seismic_dms_url (str): OSDU Seismic DMS service URL
+            access_token (Optional[str]): oauth2 access token of the OSDU user
+            refresh_token (Optional[str]): oauth2 refresh token of the OSDU user
+            refresh_url (Optional[str]): oauth2 token refresh url of the OSDU user IdP
+            client_id (Optional[str]): oauth2 client application id
+            client_secret (Optional[str]): oauth2 client application secret
+            write_access (bool): If it is allowed for SDFS to modify data. Defaults to False
+            single_file (bool): Whether to treat the dataset as a single file, even if it consists of multiple segments. If True, the `read_block` method will read data from multiple segments as if it were a single file.  If False, `read_block` will only read from a single segment. Defaults to False.
+        """  # noqa: E501
+        # Stored on the instance, not forwarded: SeismicDmsClient does not
+        # accept `single_file` (see _get_seismic_dms_client, which pops it).
+        self._is_single_file = single_file
+        # NOTE(review): `self` is also passed as a positional argument to
+        # AbstractFileSystem.__init__ in addition to the implicit receiver --
+        # confirm this is intentional.
+        super().__init__(
+            self,
+            sdpath=sdpath,
+            seismic_dms_url=seismic_dms_url,
+            access_token=access_token,
+            refresh_url=refresh_url,
+            refresh_token=refresh_token,
+            client_id=client_id,
+            client_secret=client_secret,
+            write_access=write_access,
+        )
+
+    def _get_seismic_dms_client(self) -> SeismicDmsClient:
+        # TODO(Yan Sushchynski): Figure out where storage_options are set # noqa: TD003
+        storage_options = deepcopy(self.storage_options)
+        storage_options.pop("single_file", None)
+        if self._seismic_dms_client is None:
+            self._seismic_dms_client = SeismicDmsClient(**storage_options)
+        return self._seismic_dms_client
+
+    def _get_final_fs(self, force: bool = False) -> AbstractFileSystem:
+        """Build (or rebuild when `force`) the cached cloud-native filesystem.
+
+        The dataset's storage URL and provider-specific storage options are
+        obtained from the Seismic DMS client and resolved via fsspec.
+        """
+        if self._final_fs is None or force:
+            seismic_dms_client = self._get_seismic_dms_client()
+            final_storage_url = seismic_dms_client.get_dataset_storage_url(
+                seismic_dms_client.sdpath
+            )
+            final_storage_options = self._get_seismic_dms_client().get_storage_options()
+            # url_to_fs returns (fs, path); only the filesystem is kept.
+            self._final_fs = fsspec_core.url_to_fs(
+                final_storage_url, **final_storage_options
+            )[0]
+
+        return self._final_fs
+
+    def _get_final_path(self, path: str) -> str:
+        """Translate an sd:// path into its cloud-native storage URL."""
+        return self._get_seismic_dms_client().get_dataset_storage_url(path)
+
+    @override
+    @property
+    def fsid(self) -> str:
+        """Filesystem id; the "sd" protocol name identifies this filesystem."""
+        return self.protocol
+
+    @override
+    @property
+    def transaction(self) -> Transaction:
+        """Transaction object of the wrapped (final) filesystem."""
+        return self._get_final_fs().transaction
+
+    @override
+    def start_transaction(self) -> Transaction:
+        """Begin a transaction on the wrapped (final) filesystem."""
+        return self._get_final_fs().start_transaction()
+
+    @override
+    def end_transaction(self) -> None:
+        """Finish the transaction on the wrapped (final) filesystem."""
+        self._get_final_fs().end_transaction()
+
+    @override
+    def invalidate_cache(self, path: Optional[str] = None) -> None:
+        if path:
+            final_path = self._get_final_path(path)
+            self._get_final_fs().invalidate_cache(path=final_path)
+        else:
+            self._get_final_fs().invalidate_cache()
+
+    @override
+    def mkdir(self, path: str, create_parents: bool = True, **kwargs: int) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().mkdir(
+            path=final_path, create_parents=create_parents, **kwargs
+        )
+
+    @override
+    def makedirs(self, path: str, exist_ok: bool = False) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().makedirs(path=final_path, exist_ok=exist_ok)
+
+    @override
+    def rmdir(self, path: str) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().rmdir(final_path)
+
+    @override
+    def ls(self, path: str, detail: bool = True, **kwargs: int) -> list:
+        """List entries under *path*.
+
+        Maps the sd:// path to the final storage path, lists it through the
+        final filesystem, and translates results back into the sd namespace.
+        """
+        _path = self._strip_protocol(path)
+        final_path = self._get_final_path(path)
+        _final_path = self._get_final_fs()._strip_protocol(path=final_path)
+
+        if detail:
+            # Enrich each entry with dataset coordinates from the DMS client.
+            return [
+                {
+                    "tenant": self._get_seismic_dms_client()._sd_tenant,
+                    "subproject": self._get_seismic_dms_client()._sd_subproject,
+                    "subproject_path": self._get_seismic_dms_client()._sd_subproject_path,  # noqa: E501
+                    "dataset": self._get_seismic_dms_client()._sd_dataset_name,
+                    **path_details,
+                }
+                for path_details in self._get_final_fs().ls(
+                    path=final_path, detail=detail, **kwargs
+                )
+            ]
+
+        # Non-detail mode: rewrite each final-storage path back into the sd
+        # namespace; the listed root itself is reported as "<path>/".
+        return [
+            f"{_path}/"
+            if _final_path.strip("/") == str(p)
+            else os.path.join(_path, str(p).replace(_final_path, ""))
+            for p in self._get_final_fs().ls(path=final_path, detail=detail, **kwargs)
+        ]
+
+    @override
+    def _ls_from_cache(self, path: str) -> List[str]:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs()._ls_from_cache(path=final_path)
+
+    @override
+    def walk(
+        self,
+        path: str,
+        maxdepth: Optional[int] = None,
+        topdown: bool = True,
+        on_error: str = "omit",
+        **kwargs: int,
+    ) -> Generator:
+        final_path = self._get_final_path(path)
+
+        yield from self._get_final_fs().walk(
+            path=final_path,
+            maxdepth=maxdepth,
+            topdown=topdown,
+            on_error=on_error,
+            **kwargs,
+        )
+
+    @override
+    def find(
+        self,
+        path: str,
+        maxdepth: Optional[int] = None,
+        withdirs: bool = False,
+        detail: bool = False,
+        **kwargs: int,
+    ) -> list:
+        """Find files under *path* via the final filesystem.
+
+        Detail results are enriched with dataset coordinates; plain results
+        are rewritten from final-storage paths back into the sd namespace.
+        """
+        _path = self._strip_protocol(path)
+        final_path = self._get_final_path(path)
+        _final_path = self._get_final_fs()._strip_protocol(path=final_path)
+
+        if detail:
+            return [
+                {
+                    "tenant": self._get_seismic_dms_client()._sd_tenant,
+                    "subproject": self._get_seismic_dms_client()._sd_subproject,
+                    "subproject_path": self._get_seismic_dms_client()._sd_subproject_path,  # noqa: E501
+                    "dataset": self._get_seismic_dms_client()._sd_dataset_name,
+                    **path_details,
+                }
+                for path_details in self._get_final_fs().find(
+                    path=final_path,
+                    maxdepth=maxdepth,
+                    withdirs=withdirs,
+                    detail=detail,
+                    **kwargs,
+                )
+            ]
+
+        return [
+            os.path.join(_path, str(p).replace(_final_path, ""))
+            for p in self._get_final_fs().find(
+                path=final_path,
+                maxdepth=maxdepth,
+                withdirs=withdirs,
+                detail=detail,
+                **kwargs,
+            )
+        ]
+
+    @override
+    def du(
+        self,
+        path: str,
+        total: bool = True,
+        maxdepth: Optional[int] = None,
+        withdirs: bool = False,
+        **kwargs: int,
+    ) -> Union[int, dict]:
+        """Disk usage under *path*.
+
+        Returns a total int, or (when the final fs returns per-path sizes)
+        a dict whose keys are rewritten back into the sd namespace.
+        """
+        final_path = self._get_final_path(path)
+        out = self._get_final_fs().du(
+            path=final_path, total=total, maxdepth=maxdepth, withdirs=withdirs, **kwargs
+        )
+
+        if isinstance(out, dict):
+            # Rebase every reported key from the dataset's final-storage root
+            # onto the sd:// dataset root.
+            _zero_sdpath = self._get_seismic_dms_client().sdpath
+            _sdpath_stripped = self._strip_protocol(_zero_sdpath)
+            _zero_final_path_stripped = self._get_final_fs()._strip_protocol(
+                path=self._get_final_path(_zero_sdpath)
+            )
+
+            return {
+                os.path.join(
+                    _sdpath_stripped, str(p).replace(
+                        _zero_final_path_stripped, "")
+                ): v
+                for p, v in out.items()
+            }
+
+        return out
+
+    @override
+    def glob(self, path: str, maxdepth: Optional[int] = None, **kwargs: int) -> list:
+        """Glob within the dataset, mapping matches back to sd paths."""
+        # works only inside dataset boundaries
+        final_path = self._get_final_path(path)
+
+        _root_path = self._strip_protocol(
+            self._get_seismic_dms_client().sdpath)
+        _root_final_path = self._get_final_fs()._strip_protocol(
+            path=self._get_seismic_dms_client().dataset_storage_root
+        )
+
+        # Rebase every match from the final-storage root onto the sd root.
+        return [
+            os.path.join(_root_path, str(p).replace(
+                _root_final_path, "").lstrip("/"))
+            for p in self._get_final_fs().glob(
+                path=final_path, maxdepth=maxdepth, **kwargs
+            )
+        ]
+
+    @override
+    def expand_path(
+        self,
+        path: Union[str, list],
+        recursive: bool = False,
+        maxdepth: Optional[int] = None,
+        **kwargs: int,
+    ) -> list:
+        """Expand *path* (str or list) to concrete sd paths.
+
+        Raises:
+            TypeError: when *path* is neither str nor list.
+        """
+        # works only inside dataset boundaries
+        if isinstance(path, list):
+            final_path = [self._get_final_path(p) for p in path]
+        elif isinstance(path, str):
+            final_path = self._get_final_path(path)
+        else:
+            msg = "path must be str or list"
+            raise TypeError(msg)
+
+        _root_path = self._strip_protocol(
+            self._get_seismic_dms_client().sdpath)
+        _root_final_path = self._get_final_fs()._strip_protocol(
+            path=self._get_seismic_dms_client().dataset_storage_root
+        )
+
+        # Rebase expanded final-storage paths back onto the sd root.
+        return [
+            os.path.join(_root_path, str(p).replace(
+                _root_final_path, "").lstrip("/"))
+            for p in self._get_final_fs().expand_path(
+                path=final_path, recursive=recursive, maxdepth=maxdepth, **kwargs
+            )
+        ]
+
+    @override
+    def exists(self, path: str, **kwargs: int) -> bool:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().exists(path=final_path, **kwargs)
+
+    @override
+    def lexists(self, path: str, **kwargs: int) -> bool:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().lexists(path=final_path, **kwargs)
+
+    @override
+    def info(self, path: str, **kwargs: int) -> dict:
+        if not self._is_single_file:
+            final_path = self._get_final_path(path)
+            return self._get_final_fs().info(path=final_path, **kwargs)
+        files = self._get_file_chunks(path)
+        return {
+            "name": path, 
+            "size": sum(f["size"] for f in files), 
+            "type": "segmented_file"
+        }
+
+    @override
+    def checksum(self, path: str) -> int:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().checksum(path=final_path)
+
+    @override
+    def size(self, path: str) -> int:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().size(path=final_path)
+
+    @override
+    def sizes(self, paths: List[str]) -> List[int]:
+        final_paths = [self._get_final_path(p) for p in paths]
+        return self._get_final_fs().sizes(paths=final_paths)
+
+    @override
+    def isdir(self, path: str) -> bool:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().isdir(path=final_path)
+
+    @override
+    def isfile(self, path: str) -> bool:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().isfile(path=final_path)
+
+    @override
+    def write_text(
+        self,
+        path: str,
+        value: str,
+        encoding: Optional[str] = None,
+        errors: Optional[str] = None,
+        newline: Optional[str] = None,
+        **kwargs: int,
+    ) -> int:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().write_text(
+            path=final_path,
+            value=value,
+            encoding=encoding,
+            errors=errors,
+            newline=newline,
+            **kwargs,
+        )
+
+    @override
+    def read_text(
+        self,
+        path: str,
+        encoding: Optional[str] = None,
+        errors: Optional[str] = None,
+        newline: Optional[str] = None,
+        **kwargs: int,
+    ) -> str:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().read_text(
+            path=final_path, encoding=encoding, errors=errors, newline=newline, **kwargs
+        )
+
+    def _validate_segments_naming_and_sort(
+        self,
+        dataset_name: str, 
+        files: list[dict]
+    ) -> list[dict]:
+        """Validate files naming and sort them.
+        
+        We assume that files are named with consecutive integers, e.g., 0, 1, 2...
+
+        Args:
+            dataset_name (str): sd dataset path
+            files (list[dict]): list of blobs
+
+        Returns:
+            list[dict]: sorted list of blobs
+        """
+        def sort_by_int_index(file: dict, dataset_prefix: str) -> int:
+            try:
+                return int(file["name"].removeprefix(dataset_prefix))
+            except (KeyError, ValueError) as e:
+                msg = f"The file {file} doesn't have an integer name."
+                raise ValueError(msg) from e
+
+        dataset_storage_folder = self._get_final_path(dataset_name)
+        dataset_folder_prefix = self._get_final_fs().info(
+            dataset_storage_folder)["name"] + "/"
+
+        sorted_files = sorted(files, key=lambda file: sort_by_int_index(
+            file, dataset_folder_prefix))
+        for n, file in enumerate(sorted_files):
+            file_int_name = int(
+                file["name"].removeprefix(dataset_folder_prefix))
+            if n != file_int_name:
+                msg = f"File name: {file_int_name} doesn't match index: {n}." \
+                      "File batches shouldn't have missing integers in the range."
+                raise ValueError(msg)
+        return sorted_files
+
+    def _get_file_chunks(self, dataset_name: str) -> list[dict]:
+        """Get file chunks from the dataset, which are a chunked logical file.
+
+        Args:
+            dataset_name (str): Name of the dataset in Seismic Service - the dataset 
+                                represents a folder in the cloud storage
+
+        Returns:
+            list[dict]: List of file segments
+        """
+        files = self.listdir(dataset_name) # type: list[dict]
+        files_len = len(files)
+
+        if files_len == 0:
+            msg = f"{dataset_name} is empty"
+            raise ValueError(msg)
+        if files_len == 1:
+            return files
+        return self._validate_segments_naming_and_sort(dataset_name, files)
+
    def _read_block_from_segmented_file(
        self, 
        dataset_name: str, 
        offset: int, 
        length: int, 
        delimiter: Optional[bytes] = None
    ) -> bytes:
        """Read block from segmented file.

        It can read data from multiple files that constitute the logical file.
        Segments are walked in their validated order; the requested global
        byte range [offset, offset + length) is mapped onto per-segment
        (local offset, read length) pairs.

        NOTE(review): assumes ``length`` is a concrete int — if a caller ever
        passes ``length=None`` (fsspec's "to end of file" convention), the
        ``remaining_length`` arithmetic below would fail; confirm callers.
        NOTE(review): ``delimiter`` is forwarded to each per-segment
        ``read_block`` call independently, so delimiter snapping cannot span a
        segment boundary — verify this is the intended semantics.

        Args:
            dataset_name (str): Name of the dataset in the 
                                Cloud Storage - usually it is a folder
            offset (int): Start offset
            length (int): Length to read
            delimiter (Optional[bytes]): Delimiter

        Returns:
            bytes: Data read from file
        """
        global_offset = offset
        remaining_length = length
        current_offset = 0  # running sum of the sizes of segments already passed
        data = []
        files = self._get_file_chunks(dataset_name)
        for file in files:
            file_size = self._get_final_fs().size(file["name"])
            # If the global offset falls within the current segment file:
            if global_offset < current_offset + file_size:
                # Calculate the offset within the current segment file.  
                # 'max(0, ...)' handles
                # cases where the global offset is in the middle of a segment.
                local_file_offset = max(global_offset - current_offset, 0)
                # Never read past the end of this segment; the remainder (if
                # any) is picked up from the following segment(s).
                read_length = min(remaining_length,
                                  file_size - local_file_offset)
                file_data = self._get_final_fs().read_block(
                    file["name"], local_file_offset, read_length, delimiter)
                data.append(file_data)
                remaining_length -= read_length
                if remaining_length <= 0:
                    break
            current_offset += file_size
        return b''.join(data)
+
+    @override
+    def read_block(
+        self, fn: str, offset: int, length: int, delimiter: Optional[bytes] = None
+    ) -> bytes:
+        if not self._is_single_file:
+            final_path = self._get_final_path(fn)
+            return self._get_final_fs().read_block(
+                fn=final_path, offset=offset, length=length, delimiter=delimiter
+            )
+        return self._read_block_from_segmented_file(fn, offset, length, delimiter)
+
+    @override
+    def pipe_file(self, path: str, value: Optional[bytes], **kwargs: int) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().pipe_file(final_path, value, **kwargs)
+
+    @override
+    def pipe(
+        self, path: Union[str, dict], value: Optional[bytes] = None, **kwargs: int
+    ) -> None:
+        if isinstance(path, dict) and value is None:
+            final_path = {self._get_final_path(p): b for p, b in path.items()}
+        elif isinstance(path, str) and value is not None:
+            final_path = self._get_final_path(path)
+        else:
+            msg = "path must be str or dict (and check corresponding value)"
+            raise TypeError(msg)
+
+        self._get_final_fs().pipe(path=final_path, value=value, **kwargs)
+
+    @override
+    def cat_file(
+        self,
+        path: str,
+        start: Optional[int] = None,
+        end: Optional[int] = None,
+        **kwargs: int,
+    ) -> bytes:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().cat_file(
+            path=final_path, start=start, end=end, **kwargs
+        )
+
    @override
    def cat_ranges(
        self,
        paths: List[str],
        starts: Union[int, List[int]],
        ends: Union[int, List[int]],
        max_gap: None = None,
        on_error: str = "return",
        **kwargs: int,
    ) -> List[bytes]:
        """Fetch byte ranges from several paths via the final filesystem.

        In single-file mode each logical path is expanded into its segment
        blobs before delegating.

        NOTE(review): in the single-file branch the expanded ``final_paths``
        can be longer than ``paths``, while ``starts``/``ends`` are passed
        through unchanged — if callers supply per-path lists, they would no
        longer line up with the expanded paths. Confirm callers only use this
        with scalar starts/ends or single-segment datasets.

        Args:
            paths (List[str]): Paths to read from.
            starts (Union[int, List[int]]): Start offset(s).
            ends (Union[int, List[int]]): End offset(s).
            max_gap (None): Unused; kept for fsspec signature compatibility.
            on_error (str): "return" collects exceptions in the result list.
            **kwargs (int): Extra options for the final filesystem.

        Returns:
            List[bytes]: One bytes object per requested range.
        """
        if self._is_single_file:
            files = [self._get_file_chunks(p) for p in paths]
            files = list(chain.from_iterable(files))
            final_paths = [f["name"] for f in files]
        else:
            final_paths = [self._get_final_path(p) for p in paths]
        # batch_size=len(final_paths): issue every range request concurrently.
        return self._get_final_fs().cat_ranges(
            paths=final_paths,
            starts=starts,
            ends=ends,
            max_gap=max_gap,
            batch_size=len(final_paths),
            on_error=on_error,
            **kwargs,
        )
+
+    @override
+    def cat(
+        self,
+        path: Union[str, list],
+        recursive: bool = False,
+        on_error: str = "raise",
+        **kwargs: int,
+    ) -> Union[bytes, dict]:
+        if isinstance(path, list):
+            final_path = [self._get_final_path(p) for p in path]
+        else:
+            final_path = self._get_final_path(path)
+
+        out = self._get_final_fs().cat(
+            path=final_path, recursive=recursive, on_error=on_error, **kwargs
+        )
+
+        if isinstance(out, dict):
+            _zero_sdpath = self._get_seismic_dms_client().sdpath
+            _sdpath_stripped = self._strip_protocol(_zero_sdpath)
+            _zero_final_path_stripped = self._get_final_fs()._strip_protocol(
+                path=self._get_final_path(_zero_sdpath)
+            )
+
+            return {
+                os.path.join(
+                    _sdpath_stripped, str(p).replace(
+                        _zero_final_path_stripped, "")
+                ): v
+                for p, v in out.items()
+            }
+
+        return out
+
+    @override
+    def get_file(
+        self,
+        rpath: str,
+        lpath: str,
+        callback: Callable = callbacks._DEFAULT_CALLBACK,
+        outfile: Union[io.IOBase, io.TextIOWrapper] = None,
+        **kwargs: int,
+    ) -> None:
+        final_rpath = self._get_final_path(rpath)
+        self._get_final_fs().get_file(
+            rpath=final_rpath, lpath=lpath, callback=callback, outfile=outfile, **kwargs
+        )
+
+    @override
+    def get(
+        self,
+        rpath: str,
+        lpath: str,
+        recursive: bool = False,
+        callback: Callable = callbacks._DEFAULT_CALLBACK,
+        maxdepth: Optional[int] = None,
+        **kwargs: int,
+    ) -> None:
+        if isinstance(rpath, list):
+            final_rpath = [self._get_final_path(p) for p in rpath]
+        else:
+            final_rpath = self._get_final_path(rpath)
+
+        self._get_final_fs().get(
+            rpath=final_rpath,
+            lpath=lpath,
+            recursive=recursive,
+            callback=callback,
+            maxdepth=maxdepth,
+            **kwargs,
+        )
+
+    @override
+    def put_file(
+        self,
+        lpath: str,
+        rpath: str,
+        callback: Callable = callbacks._DEFAULT_CALLBACK,
+        **kwargs: int,
+    ) -> None:
+        final_rpath = self._get_final_path(rpath)
+        self._get_final_fs().put_file(
+            lpath=lpath, rpath=final_rpath, callback=callback, **kwargs
+        )
+
+    @override
+    def put(
+        self,
+        lpath: str,
+        rpath: str,
+        recursive: bool = False,
+        callback: Callable = callbacks._DEFAULT_CALLBACK,
+        maxdepth: Optional[int] = None,
+        **kwargs: int,
+    ) -> None:
+        if isinstance(rpath, list):
+            final_rpath = [self._get_final_path(p) for p in rpath]
+        else:
+            final_rpath = self._get_final_path(rpath)
+
+        self._get_final_fs().put(
+            lpath=lpath,
+            rpath=final_rpath,
+            recursive=recursive,
+            callback=callback,
+            maxdepth=maxdepth,
+            **kwargs,
+        )
+
+    @override
+    def head(self, path: str, size: int = default_size) -> str:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().head(path=final_path, size=size)
+
+    @override
+    def tail(self, path: str, size: int = default_size) -> str:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().tail(path=final_path, size=size)
+
+    @override
+    def cp_file(self, path1: str, path2: str, **kwargs: int) -> None:
+        final_path1 = self._get_final_path(path1)
+        final_path2 = self._get_final_path(path2)
+        self._get_final_fs().cp_file(path1=final_path1, path2=final_path2, **kwargs)
+
+    @override
+    def copy(
+        self,
+        path1: Union[str, list],
+        path2: Union[str, list],
+        recursive: bool = False,
+        maxdepth: Optional[int] = None,
+        on_error: Optional[str] = None,
+        **kwargs: int,
+    ) -> None:
+        if isinstance(path1, list) and isinstance(path2, list):
+            final_path1 = [self._get_final_path(p) for p in path1]
+            final_path2 = [self._get_final_path(p) for p in path2]
+        elif isinstance(path1, str) and isinstance(path2, str):
+            final_path1 = self._get_final_path(path1)
+            final_path2 = self._get_final_path(path2)
+        else:
+            msg = "paths must be str or list"
+            raise TypeError(msg)
+
+        self._get_final_fs().copy(
+            path1=final_path1,
+            path2=final_path2,
+            recursive=recursive,
+            maxdepth=maxdepth,
+            on_error=on_error,
+            **kwargs,
+        )
+
+    @override
+    def mv(
+        self,
+        path1: str,
+        path2: str,
+        recursive: bool = False,
+        maxdepth: Optional[int] = None,
+        **kwargs: int,
+    ) -> None:
+        final_path1 = self._get_final_path(path1)
+        final_path2 = self._get_final_path(path2)
+        self._get_final_fs().mv(
+            path1=final_path1,
+            path2=final_path2,
+            recursive=recursive,
+            maxdepth=maxdepth,
+            **kwargs,
+        )
+
+    @override
+    def rm_file(self, path: str) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().rm_file(path=final_path)
+
+    @override
+    def _rm(self, path: str) -> None:
+        # this is the old name for the method, prefer rm_file
+        final_path = self._get_final_path(path)
+        self._get_final_fs().rm_file(path=final_path)
+
+    @override
+    def rm(
+        self,
+        path: Union[str, list],
+        recursive: bool = False,
+        maxdepth: Optional[int] = None,
+    ) -> None:
+        if isinstance(path, list):
+            final_path = [self._get_final_path(p) for p in path]
+        else:
+            final_path = self._get_final_path(path)
+
+        self._get_final_fs().rm(path=final_path, recursive=recursive, maxdepth=maxdepth)
+
+    @override
+    def _open(
+        self,
+        path: str,
+        mode: str = "rb",
+        block_size: Optional[int] = None,
+        autocommit: bool = True,
+        cache_options: Optional[dict] = None,
+        **kwargs: int,
+    ) -> io.IOBase:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs()._open(
+            path=final_path,
+            mode=mode,
+            block_size=block_size,
+            autocommit=autocommit,
+            cache_options=cache_options,
+            **kwargs,
+        )
+
+    @override
+    def open(
+        self,
+        path: str,
+        mode: str = "rb",
+        block_size: Optional[int] = None,
+        cache_options: Optional[dict] = None,
+        compression: Optional[str] = None,
+        **kwargs: int,
+    ) -> Union[io.IOBase, io.TextIOWrapper]:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().open(
+            path=final_path,
+            mode=mode,
+            block_size=block_size,
+            cache_options=cache_options,
+            compression=compression,
+            **kwargs,
+        )
+
+    @override
+    def touch(self, path: str, truncate: bool = True, **kwargs: int) -> None:
+        final_path = self._get_final_path(path)
+        self._get_final_fs().touch(path=final_path, truncate=truncate, **kwargs)
+
+    @override
+    def ukey(self, path: str) -> str:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().ukey(path=final_path)
+
+    @override
+    def created(self, path: str) -> datetime:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().created(path=final_path)
+
+    @override
+    def modified(self, path: str) -> datetime:
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().modified(path=final_path)
+
+    # ------------------------------------------------------------------------
+    # Aliases
+
+    @override
+    def sign(self, path: str, expiration: int = 100, **kwargs: int) -> str:
+        # GC: AttributeError: you need a private key to sign credentials.
+        # The credentials you are currently using <class 'google.oauth2.credentials.Credentials'> just contains a token.  # noqa: E501
+        # See https://googleapis.dev/python/google-api-core/latest/auth.html#setting-up-a-service-account for more details.  # noqa: E501
+        final_path = self._get_final_path(path)
+        return self._get_final_fs().sign(
+            path=final_path, expiration=expiration, **kwargs
+        )
+
+    # another aliases inherited
+
+    # ------------------------------------------------------------------------
+    # Custom Seismic Store methods
+
+    def dataset_audit(self) -> dict:
+        """Checking basic du information of dataset by root path.
+
+        Returns:
+            dict: generated dataset metadata
+        """
+        lst = self.du(self._get_seismic_dms_client().sdpath, total=False)
+        size = sum(lst.values())
+
+        nobjects = len(lst)
+
+        logger.info(f" - Disk Usage, B: {size}")
+        logger.info(f" - Number of objects: {nobjects}")
+
+        return {"nobjects": nobjects, "size": size}
+
+    def update_dataset_metadata(self) -> None:
+        """Update dataset metadata property with seismic dms client."""
+        audit_metadata = self.dataset_audit()
+        self._get_seismic_dms_client().patch_dataset_metadata(
+            {"type": "GENERIC", **audit_metadata}
+        )
+
+    # ------------------------------------------------------------------------
+    # AbstractBufferedFile
+
+    @override
+    def _initiate_upload(self) -> None:
+        self._get_final_fs()._initiate_upload()
+
+    @override
+    def _fetch_range(self, start: int, end: int) -> bytes:
+        return self._get_final_fs()._fetch_range(start, end)
diff --git a/src/sdfs/exceptions.py b/src/sdfs/exceptions.py
new file mode 100644
index 0000000..31e9646
--- /dev/null
+++ b/src/sdfs/exceptions.py
@@ -0,0 +1,48 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""SDFS Custom Exceptions."""
+
+
class ValidationError(Exception):
    """Raised when input or state validation fails anywhere in the code."""
+
+
class AuthStrategyError(Exception):
    """Raise when there is an error related to the authentication strategy."""
+
+
class CredentialsError(Exception):
    """Raise when the client credentials are invalid."""
+
+
class ServiceProviderCodeNotSpecifiedError(Exception):
    """Raised when Seismic Store Service does not return a service provider code."""

    def __init__(self) -> None:
        """Build the exception with its fixed message (no extra context needed)."""
        message = "Service provider code was not specified"
        self.message = message
        super().__init__(message)
+
+
class DatasetBoundariesError(Exception):
    """Raised when an sd path falls outside the current dataset's boundaries."""

    def __init__(self) -> None:
        """Build the exception with its fixed message (no extra context needed)."""
        message = "The sdpath is not in the dataset's boundaries"
        self.message = message
        super().__init__(message)
diff --git a/src/sdfs/providers/__init__.py b/src/sdfs/providers/__init__.py
new file mode 100644
index 0000000..f1e233c
--- /dev/null
+++ b/src/sdfs/providers/__init__.py
@@ -0,0 +1,34 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""This module exposes all cloud providers supported by the system and it's factory."""
+
+from sdfs.providers.anthos import AnthosSpecific
+from sdfs.providers.aws import AwsSpecific
+from sdfs.providers.azure import AzureSpecific
+from sdfs.providers.factory import ProviderSpecificFactory
+from sdfs.providers.google import GoogleSpecific
+from sdfs.providers.ibm import IbmSpecific
+
+__all__ = [
+    "AnthosSpecific",
+    "AwsSpecific",
+    "AzureSpecific",
+    "ProviderSpecificFactory",
+    "GoogleSpecific",
+    "IbmSpecific",
+]
diff --git a/src/sdfs/providers/abstract_provider.py b/src/sdfs/providers/abstract_provider.py
new file mode 100644
index 0000000..d733915
--- /dev/null
+++ b/src/sdfs/providers/abstract_provider.py
@@ -0,0 +1,85 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""ProviderSpecific."""
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Optional
+
+from sdfs.exceptions import ValidationError
+
+if TYPE_CHECKING:
+    # avoid circular import
+    from sdfs.clients.seismic_dms_client import SeismicDmsClient
+
+
class ProviderSpecific(ABC):
    """An abstract cls to integrate cloud providers with the OSDU SeismicDMS service."""

    # Client injected at construction time; None only before __init__ runs.
    _seismic_dms_client: Optional["SeismicDmsClient"] = None

    def __init__(self, seismic_dms_client: "SeismicDmsClient") -> None:
        """Init ProviderSpecific and validate params.

        Args:
            seismic_dms_client (SeismicDmsClient): seismic_dms_client

        Raises:
            ValidationError: seismic_dms_client is required
        """
        if not seismic_dms_client:
            msg = "seismic_dms_client is required"
            raise ValidationError(msg)

        self._seismic_dms_client = seismic_dms_client

    @abstractmethod
    def get_storage_options(self) -> None:
        """Abstract storage options retrieval.

        Concrete providers return credentials/options for their cloud storage.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    @abstractmethod
    def get_dataset_storage_root(self) -> str:
        """Abstract cloud native root dataset path.

        Raises:
            NotImplementedError: not implemented

        Returns:
            str: cloud native root path
        """
        raise NotImplementedError

    @abstractmethod
    def get_dataset_storage_url(
        self, subpath_from_dataset_root: Optional[str] = None
    ) -> str:
        """Abstract dataset storage url retrieval.

        Args:
            subpath_from_dataset_root (Optional[str]): File name of the dataset.

        Raises:
            NotImplementedError: not implemented

        Returns:
            str: not implemented
        """
        raise NotImplementedError
diff --git a/src/sdfs/providers/anthos.py b/src/sdfs/providers/anthos.py
new file mode 100644
index 0000000..94f9f5f
--- /dev/null
+++ b/src/sdfs/providers/anthos.py
@@ -0,0 +1,60 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""AnthosSpecific."""
+from typing import Optional
+
+from .abstract_provider import ProviderSpecific
+
+
class AnthosSpecific(ProviderSpecific):
    """Dedicated Anthos client for integrating seismic dms service with cloud storage."""  # noqa: E501

    def get_storage_options(self) -> None:
        """Get storage options for Anthos.

        Retrieves downscoped credentials from Seismic DMS service
        and compose a dictionary with storage options.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_root(self) -> str:
        """Cloud native root dataset path.

        Return annotation is ``str`` to match the abstract
        ``ProviderSpecific.get_dataset_storage_root`` signature.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_url(
        self, subpath_from_dataset_root: Optional[str] = None
    ) -> str:
        """Get dataset storage url for Anthos.

        Compose and provide a string in provider-specific format representing a
        URL for accessing the seismic dataset storage location.

        Args:
            subpath_from_dataset_root (str, optional): File name of the dataset.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError
diff --git a/src/sdfs/providers/aws.py b/src/sdfs/providers/aws.py
new file mode 100644
index 0000000..0eb5a3c
--- /dev/null
+++ b/src/sdfs/providers/aws.py
@@ -0,0 +1,60 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""AwsSpecific."""
+from typing import Optional
+
+from .abstract_provider import ProviderSpecific
+
+
class AwsSpecific(ProviderSpecific):
    """Dedicated Aws client for integrating seismic dms service with cloud storage."""

    def get_storage_options(self) -> None:
        """Get storage options for Aws.

        Retrieves downscoped credentials from Seismic DMS service
        and compose a dictionary with storage options.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_root(self) -> str:
        """Cloud native root dataset path.

        Return annotation is ``str`` to match the abstract
        ``ProviderSpecific.get_dataset_storage_root`` signature.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_url(
        self, subpath_from_dataset_root: Optional[str] = None
    ) -> str:
        """Get dataset storage url for Aws.

        Compose and provide a string in provider-specific format representing a
        URL for accessing the seismic dataset storage location.

        Args:
            subpath_from_dataset_root (str, optional): File name of the dataset.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError
diff --git a/src/sdfs/providers/azure.py b/src/sdfs/providers/azure.py
new file mode 100644
index 0000000..0ea7719
--- /dev/null
+++ b/src/sdfs/providers/azure.py
@@ -0,0 +1,60 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""AzureSpecific."""
+from typing import Optional
+
+from .abstract_provider import ProviderSpecific
+
+
class AzureSpecific(ProviderSpecific):
    """Dedicated Azure client for integrating seismic dms service with cloud storage."""

    def get_storage_options(self) -> None:
        """Get storage options for Azure.

        Retrieves downscoped credentials from Seismic DMS service
        and compose a dictionary with storage options.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_root(self) -> str:
        """Cloud native root dataset path.

        Return annotation is ``str`` to match the abstract
        ``ProviderSpecific.get_dataset_storage_root`` signature.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError

    def get_dataset_storage_url(
        self, subpath_from_dataset_root: Optional[str] = None
    ) -> str:
        """Get dataset storage url for Azure.

        Compose and provide a string in provider-specific format representing a
        URL for accessing the seismic dataset storage location.

        Args:
            subpath_from_dataset_root (str, optional): File name of the dataset.

        Raises:
            NotImplementedError: not implemented
        """
        raise NotImplementedError
diff --git a/src/sdfs/providers/factory.py b/src/sdfs/providers/factory.py
new file mode 100644
index 0000000..e321ca6
--- /dev/null
+++ b/src/sdfs/providers/factory.py
@@ -0,0 +1,95 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""ProviderSpecificFactory."""
+from typing import TYPE_CHECKING, Optional
+
+from sdfs.exceptions import ServiceProviderCodeNotSpecifiedError, ValidationError
+
+from .abstract_provider import ProviderSpecific
+from .anthos import AnthosSpecific
+from .aws import AwsSpecific
+from .azure import AzureSpecific
+from .google import GoogleSpecific
+from .ibm import IbmSpecific
+
+if TYPE_CHECKING:
+    # avoid circular import
+    from sdfs.clients.seismic_dms_client import SeismicDmsClient
+
+
+class ProviderSpecificFactory:
+    """Factory to create instances of custom cloud provider clients.
+
+    Raises:
+        ValidationError: When the seismic_dms_client is not provided
+        ServiceProviderCodeNotSpecifiedError: When no provider code is given
+        ValidationError: When the service provider code is not in the
+                                _provider_specific_registry
+    """
+
+    _seismic_dms_client: "SeismicDmsClient"
+    _provider_specific_registry = {
+        "gc": GoogleSpecific,
+        "aws": AwsSpecific,
+        "azure": AzureSpecific,
+        "ibm": IbmSpecific,
+        "anthos": AnthosSpecific,
+    }
+
+    def __init__(self, seismic_dms_client: "SeismicDmsClient") -> None:
+        """Factory initialization.
+
+        Args:
+            seismic_dms_client (SeismicDmsClient): seismic_dms_client
+
+        Raises:
+            ValidationError: seismic_dms_client is required
+        """
+        if not seismic_dms_client:
+            msg = "seismic_dms_client is required"
+            raise ValidationError(msg)
+
+        self._seismic_dms_client = seismic_dms_client
+
+    def get_provider_specific(self, code: Optional[str] = None) -> ProviderSpecific:
+        """Return a client of the cloud provider based on the provider_code.
+
+        The service provider code is retrieved in the get_dataset_properties method
+        of the seismic_dms_client when it is instantiated.
+
+        Args:
+            code (str, optional): Cloud provider code. Defaults to None.
+
+        Raises:
+            ServiceProviderCodeNotSpecifiedError: The service provider code is not specified
+            ValidationError: The service provider code is not valid
+
+        Returns:
+            ProviderSpecific: Instance of the cloud provider custom client.
+        """  # noqa: E501
+        # Get the provider code from arguments or SeismicDmsClient
+        code = code or self._seismic_dms_client.get_service_provider_code()
+        if not code:
+            raise ServiceProviderCodeNotSpecifiedError
+
+        if code not in self._provider_specific_registry:
+            msg = f"{code} is not valid"
+            raise ValidationError(msg)
+
+        cloud_provider_class = self._provider_specific_registry[code]
+        return cloud_provider_class(self._seismic_dms_client)
diff --git a/src/sdfs/providers/google.py b/src/sdfs/providers/google.py
new file mode 100644
index 0000000..019b902
--- /dev/null
+++ b/src/sdfs/providers/google.py
@@ -0,0 +1,69 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""GoogleSpecific."""
+from os import path
+from typing import Optional
+
+from .abstract_provider import ProviderSpecific
+
+
+class GoogleSpecific(ProviderSpecific):
+    """Dedicated GCP client for integrating seismic dms service with cloud storage."""
+
+    def get_storage_options(self) -> dict:
+        """Get storage options for GC.
+
+        Retrieves downscoped credentials from Seismic DMS service
+        and compose a dictionary with storage options.
+
+        Returns:
+            dict: Dictionary with the required credentials to access
+            a dataset defined by the sdpath property.
+        """
+        downscoped_credentials = self._seismic_dms_client.get_downscoped_credentials()
+        return {"token": downscoped_credentials["access_token"]}
+
+    def get_dataset_storage_root(self) -> str:
+        """Cloud native root dataset path.
+
+        Returns:
+            str: cloud native root path
+        """
+        dataset_properties = self._seismic_dms_client.get_dataset_properties()
+        gcsurl = dataset_properties["gcsurl"]
+        return path.join("gs://", gcsurl)  # noqa: PTH118 cloud path
+
+    def get_dataset_storage_url(
+        self, subpath_from_dataset_root: Optional[str] = None
+    ) -> str:
+        """Get dataset storage url for GC.
+
+        Compose and a provide a string in provider-specific format representing a
+        URL for accessing the seismic dataset storage location.
+
+        Args:
+            subpath_from_dataset_root (str, optional): File name of the dataset.
+
+        Returns:
+            str: URL where the dataset is stored in the cloud provider
+        """
+        dataset_properties = self._seismic_dms_client.get_dataset_properties()
+        gcsurl = dataset_properties["gcsurl"]
+        subpath_from_dataset_root = subpath_from_dataset_root.lstrip("/")
+
+        return f"gs://{gcsurl}/{subpath_from_dataset_root}"
diff --git a/src/sdfs/providers/ibm.py b/src/sdfs/providers/ibm.py
new file mode 100644
index 0000000..8ffc9ef
--- /dev/null
+++ b/src/sdfs/providers/ibm.py
@@ -0,0 +1,60 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""IbmSpecific."""
+from typing import Optional
+
+from .abstract_provider import ProviderSpecific
+
+
+class IbmSpecific(ProviderSpecific):
+    """Dedicated Ibm client for integrating seismic dms service with cloud storage."""
+
+    def get_storage_options(self) -> None:
+        """Get storage options for Ibm.
+
+        Retrieves downscoped credentials from Seismic DMS service
+        and composes a dictionary with storage options.
+
+        Raises:
+            NotImplementedError: IBM support is not implemented yet
+        """
+        raise NotImplementedError
+
+    def get_dataset_storage_root(self) -> None:
+        """Cloud native root dataset path.
+
+        Raises:
+            NotImplementedError: IBM support is not implemented yet
+        """
+        raise NotImplementedError
+
+    def get_dataset_storage_url(
+        self, subpath_from_dataset_root: Optional[str] = None
+    ) -> str:
+        """Get dataset storage url for Ibm.
+
+        Compose and provide a string in provider-specific format representing a
+        URL for accessing the seismic dataset storage location.
+
+        Args:
+            subpath_from_dataset_root (str, optional): File name of the dataset.
+
+        Raises:
+            NotImplementedError: IBM support is not implemented yet
+        """
+        raise NotImplementedError
diff --git a/src/sdfs/utils/__init__.py b/src/sdfs/utils/__init__.py
new file mode 100644
index 0000000..92a6ccf
--- /dev/null
+++ b/src/sdfs/utils/__init__.py
@@ -0,0 +1,18 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Utils."""
diff --git a/src/sdfs/utils/http_utils.py b/src/sdfs/utils/http_utils.py
new file mode 100644
index 0000000..3ac1ba3
--- /dev/null
+++ b/src/sdfs/utils/http_utils.py
@@ -0,0 +1,31 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Utils for improve python request library user experience."""
+import enum
+
+UNAUTHORIZED_CODE = 401  # HTTP 401 Unauthorized status code
+
+
+class HttpMethod(enum.Enum):
+    """Enumeration of HTTP request methods supported by the client."""
+
+    GET = "GET"
+    POST = "POST"
+    PUT = "PUT"
+    PATCH = "PATCH"
+    DELETE = "DELETE"
diff --git a/src/sdfs/utils/validators.py b/src/sdfs/utils/validators.py
new file mode 100644
index 0000000..ae2eac3
--- /dev/null
+++ b/src/sdfs/utils/validators.py
@@ -0,0 +1,32 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Common validators."""
+from urllib.parse import urlparse
+
+
+def is_valid_url(url: str) -> bool:
+    """Simple URL validator checking that a scheme and netloc are present.
+
+    Args:
+        url (str): URL to validate
+
+    Returns:
+        bool: True when the URL has both a scheme and a network location
+    """
+    parsed_url = urlparse(url)
+    return bool(parsed_url.scheme and parsed_url.netloc)
diff --git a/tests/.env.example b/tests/.env.example
new file mode 100644
index 0000000..d188032
--- /dev/null
+++ b/tests/.env.example
@@ -0,0 +1,11 @@
+SD_PATH=... (default value: sd://osdu/osdu-mdio/autotest_path/integration)
+SEISMIC_STORE_URL=... (default value: https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3)
+
+# Static access token (alternative to the refresh-token flow below)
+ACCESS_TOKEN=...
+
+# For refresh token use case
+AUTH_REFRESH_TOKEN_URL=... (have default google auth URL)
+REFRESH_TOKEN=...
+CLIENT_ID=...
+CLIENT_SECRET=...
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..aadbd4e
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,18 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Test suite for the SDFS package."""
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
new file mode 100644
index 0000000..8eadb73
--- /dev/null
+++ b/tests/integration/__init__.py
@@ -0,0 +1,18 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Integration tests for the SDFS package."""
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
new file mode 100644
index 0000000..46f99e3
--- /dev/null
+++ b/tests/integration/conftest.py
@@ -0,0 +1,137 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Integration tests shared fixtures."""
+import logging
+import os
+from pathlib import Path
+from typing import Generator
+
+import pytest
+from sdfs import SDFileSystem
+
+logger = logging.getLogger()
+
+# OSDU auth configuration (static token or refresh-token flow)
+AUTH_REFRESH_TOKEN_URL = os.environ.get(
+    "AUTH_REFRESH_TOKEN_URL", "https://oauth2.googleapis.com/token"
+)
+ACCESS_TOKEN = os.environ.get("ACCESS_TOKEN", None)
+REFRESH_TOKEN = os.environ.get("REFRESH_TOKEN", None)
+CLIENT_ID = os.environ.get("CLIENT_ID", None)
+CLIENT_SECRET = os.environ.get("CLIENT_SECRET", None)
+# Seismic DMS service configuration (test dataset location and endpoint)
+SD_PATH = os.environ.get(
+    "SD_PATH",
+    "sd://osdu/osdu-mdio/autotest_path/integration",
+)
+SEISMIC_STORE_URL = os.environ.get(
+    "SEISMIC_STORE_URL",
+    "https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3",
+)
+
+
+@pytest.fixture(scope="module")
+def root_sd_path() -> str:
+    """Root sd:// path of the integration-test dataset.
+
+    Returns:
+        str: root sd path taken from the SD_PATH environment setting
+    """
+    return SD_PATH
+
+
+@pytest.fixture(scope="module")
+def write_sd_fs_conn(root_sd_path: str) -> SDFileSystem:
+    """Module-scoped SDFileSystem fixture with write access.
+
+    Args:
+        root_sd_path (str): root sd path
+
+    Returns:
+        SDFileSystem: write sd fs obj access to test dataset
+    """
+    assert SEISMIC_STORE_URL  # fail fast when required env vars are missing
+    assert REFRESH_TOKEN
+    assert AUTH_REFRESH_TOKEN_URL
+    assert CLIENT_ID
+    assert CLIENT_SECRET
+
+    return SDFileSystem(
+        sdpath=root_sd_path,
+        seismic_dms_url=SEISMIC_STORE_URL,
+        write_access=True,
+        refresh_token=REFRESH_TOKEN,
+        refresh_url=AUTH_REFRESH_TOKEN_URL,
+        client_id=CLIENT_ID,
+        client_secret=CLIENT_SECRET,
+    )
+
+
+@pytest.fixture(scope="module")
+def write_single_sd_fs_conn(root_sd_path: str) -> SDFileSystem:
+    """Module-scoped SDFileSystem fixture in single-file mode with write access.
+
+    Args:
+        root_sd_path (str): root sd path
+
+    Returns:
+        SDFileSystem: write sd fs obj access to test dataset
+    """
+    assert SEISMIC_STORE_URL  # fail fast when required env vars are missing
+    assert REFRESH_TOKEN
+    assert AUTH_REFRESH_TOKEN_URL
+    assert CLIENT_ID
+    assert CLIENT_SECRET
+
+    return SDFileSystem(
+        sdpath=root_sd_path,
+        seismic_dms_url=SEISMIC_STORE_URL,
+        write_access=True,
+        refresh_token=REFRESH_TOKEN,
+        refresh_url=AUTH_REFRESH_TOKEN_URL,
+        client_id=CLIENT_ID,
+        client_secret=CLIENT_SECRET,
+        single_file=True  # dataset is registered as a single file
+    )
+
+
+@pytest.fixture(autouse=True)
+def clean_up(write_sd_fs_conn: SDFileSystem) -> Generator:  # noqa: PT004
+    """Clean up objects from prev Sdfs test cases.
+
+    Args:
+        write_sd_fs_conn (SDFileSystem): write sd fs obj access to test dataset
+    """
+    root_test_dataset_path = SD_PATH
+    logger.info(f"Setup: {root_test_dataset_path}")
+
+    objects = write_sd_fs_conn.find(root_test_dataset_path)
+    to_remove = [obj for obj in objects if obj != f"{SD_PATH}/"]
+    if to_remove:
+        write_sd_fs_conn.rm(to_remove)
+
+    objects = write_sd_fs_conn.find(root_test_dataset_path)
+    to_remove = [obj for obj in objects if obj != f"{SD_PATH}/"]
+    [write_sd_fs_conn.rmdir(path) for path in to_remove]
+
+    yield
+
+    logger.info(f"Teardown: {root_test_dataset_path}")
+
+    if Path("tests/download_test.txt").is_file():
+        Path("tests/download_test.txt").unlink()
diff --git a/tests/integration/test_sd_file_system_integration.py b/tests/integration/test_sd_file_system_integration.py
new file mode 100644
index 0000000..1b5dfc2
--- /dev/null
+++ b/tests/integration/test_sd_file_system_integration.py
@@ -0,0 +1,554 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestSDFileSystemIntegration."""
+import json
+import logging
+import os
+from datetime import datetime
+from pathlib import Path
+
+import pytest
+from sdfs import SDFileSystem
+
+TEST_SD_PATH = os.environ.get("SD_PATH", "")  # sd:// root of the test dataset
+LOCAL_FILE_PATH = "tests/local_test.txt"  # fixture file uploaded by the tests
+DOWNLOAD_FILE_PATH = "tests/download_test.txt"  # scratch file for download tests
+
+logger = logging.getLogger(__name__)
+
+
+class TestSDFileSystemIntegration:
+    """TestSDFileSystemIntegration."""
+
+    def test_sd_file_system_exist(self, write_sd_fs_conn: SDFileSystem) -> None:
+        """Test that the SD file system round-trips through JSON serialization.
+
+        Args:
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        assert write_sd_fs_conn
+        sd_json = write_sd_fs_conn.to_json()
+        assert isinstance(write_sd_fs_conn.from_json(sd_json), SDFileSystem)
+
+        sd_options = json.loads(sd_json)
+        assert sd_options["cls"] == "sdfs.core.SDFileSystem"
+        assert sd_options["protocol"] == "sd"
+        assert TEST_SD_PATH in sd_options["sdpath"]
+
+    def test_transaction_management(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test write buffering between start_transaction and end_transaction.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        logger.info("------------ start transaction:")
+        write_sd_fs_conn.start_transaction()
+        logger.info(f"active?: {write_sd_fs_conn.transaction.fs._intrans}")
+        assert write_sd_fs_conn.transaction.fs._intrans
+
+        write_sd_fs_conn.write_text(
+            root_sd_path + "/test.txt",
+            "I love you!",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+
+        logger.info("------------ before end transaction:")
+        logger.info(f"files: {write_sd_fs_conn.transaction.files}")
+
+        assert len(write_sd_fs_conn.transaction.files) == 1
+        assert "test.txt" in str(write_sd_fs_conn.transaction.files)
+
+        logger.info("------------ end transaction:")
+        write_sd_fs_conn.end_transaction()
+        logger.info(f"active?: {write_sd_fs_conn.transaction.fs._intrans}")
+        assert not write_sd_fs_conn.transaction.fs._intrans
+
+        logger.info("------------ text.txt some info:")
+        current_size = write_sd_fs_conn.info(root_sd_path + "/test.txt")["size"]
+        logger.info(f"size: {current_size}")
+        assert current_size == 11
+
+        time_created = write_sd_fs_conn.info(root_sd_path + "/test.txt")["timeCreated"]
+        logger.info(f"timeCreated: {time_created}")
+
+        result = write_sd_fs_conn.read_text(
+            root_sd_path + "/test.txt", encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "I love you!"
+
+    def test_dir_management(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test implicit directory creation, type info and mkdir restriction.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        location = root_sd_path + "/new_location/test.txt"
+
+        write_sd_fs_conn.upload(LOCAL_FILE_PATH, location, None)
+
+        with pytest.raises(OSError, match="Forbidden"):
+            write_sd_fs_conn.mkdir(root_sd_path + "/test_location/gc/v2")
+
+        assert write_sd_fs_conn.isdir(root_sd_path + "/new_location")
+        object_type = write_sd_fs_conn.info(root_sd_path + "/new_location")["type"]
+        logger.info(f"------------ /test_location type: {object_type}")
+        assert object_type == "directory"
+
+        logger.info("------------ removing text file")
+        write_sd_fs_conn.rm_file(location)
+
+        assert not write_sd_fs_conn.isdir(root_sd_path + "/new_location")
+
+    def test_sd_file_system_interface(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test listing, sizing, globbing and metadata interface methods.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        location = root_sd_path + "/new_location/test.txt"
+
+        write_sd_fs_conn.upload(LOCAL_FILE_PATH, location, None)
+
+        result = write_sd_fs_conn.read_text(
+            location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "Hello From SDFS"
+
+        result_list = write_sd_fs_conn.ls(root_sd_path)
+        assert len(result_list) == 1
+        assert "tenant" in result_list[0]
+
+        result_list = write_sd_fs_conn.ls(root_sd_path)
+
+        assert not write_sd_fs_conn._ls_from_cache(root_sd_path)
+
+        for idx, data in enumerate(write_sd_fs_conn.walk(location)):
+            assert idx < 1
+            [path, dirs, files] = data
+
+            assert "new_location" in path
+            assert len(dirs) == 0
+            assert len(files) == 1
+
+            logger.info(f"------------ walk {location}: {path}, {dirs}, {files}")
+
+        assert write_sd_fs_conn.find(root_sd_path, 1) != write_sd_fs_conn.ls(
+            root_sd_path, False
+        )
+
+        assert write_sd_fs_conn.find(location) == write_sd_fs_conn.ls(location, False)
+
+        du = write_sd_fs_conn.du(root_sd_path + "/new_location/test.txt")
+        assert du == 15
+
+        size_details = write_sd_fs_conn.du(
+            root_sd_path + "/new_location", False, 2, True
+        )
+        assert len(size_details) == 2
+        result_location = list(size_details.keys())[-1]
+        assert "sd://" not in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_tenant in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_subproject in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_dataset_name in result_location
+
+        logger.info(f"------------ /new_location/test.txt du: {du}")
+
+        glob_res = write_sd_fs_conn.glob(root_sd_path + "/new_loc*")
+        assert len(glob_res) == 1
+        glob_res = write_sd_fs_conn.glob(root_sd_path + "/**")
+        assert len(glob_res) == 2
+
+        result_location = glob_res[-1]
+        assert "sd://" not in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_tenant in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_subproject in result_location
+        assert write_sd_fs_conn._seismic_dms_client._sd_dataset_name in result_location
+
+        logger.info(f"------------ /new_loc** glob: {glob_res}")
+
+        assert write_sd_fs_conn.exists(location)
+        assert not write_sd_fs_conn.exists(root_sd_path + "/test.txt")
+        assert write_sd_fs_conn.lexists(location)
+
+        assert write_sd_fs_conn.info(location)["type"] == "file"
+
+        checksum = write_sd_fs_conn.checksum(location)
+        logger.info(f"------------ test.txt checksum: {checksum}")
+        assert checksum
+
+        size = write_sd_fs_conn.size(location)
+        logger.info(f"------------ test.txt size: {size}")
+
+        sizes = write_sd_fs_conn.sizes([location, location])
+        assert sizes == [15, 15]
+
+        expand_path_res = write_sd_fs_conn.expand_path(
+            [root_sd_path + "/*"], recursive=True, maxdepth=2
+        )
+        assert len(expand_path_res) == 2
+        assert expand_path_res == glob_res
+
+    def test_file_management(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test file read, write, copy, move and delete operations.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        sd_file_location = root_sd_path + "/test.txt"
+
+        write_sd_fs_conn.write_text(
+            sd_file_location,
+            "Hello World!",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+
+        result = write_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "Hello World!"
+
+        assert write_sd_fs_conn.read_block(sd_file_location, 6, 5) == b"World"
+
+        write_sd_fs_conn.pipe_file(sd_file_location, bytes("Happy New Year!", "utf-8"))
+        result = write_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        assert result == "Happy New Year!"
+
+        write_sd_fs_conn.pipe({sd_file_location: bytes("Happy Birthday!", "utf-8")})
+        result = write_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(
+            f"------------ rewrite (pipe) existing text.txt with new bytes: {result}"
+        )
+        assert result == "Happy Birthday!"
+
+        bytes_result = write_sd_fs_conn.cat_file(sd_file_location, start=0, end=5)
+        logger.info(
+            f"------------ cat text.txt content from byte 1 to 5: {bytes_result}"
+        )
+        assert bytes_result == b"Happy"
+
+        bytes_results = write_sd_fs_conn.cat_ranges(
+            paths=[sd_file_location, sd_file_location],
+            starts=[0, 6],
+            ends=[5, 14],
+        )
+        assert bytes_results[0] == b"Happy"
+        assert bytes_results[-1] == b"Birthday"
+
+        bytes_results = write_sd_fs_conn.cat([sd_file_location, sd_file_location])
+
+        bytes_results = [value for _key, value in bytes_results.items()]
+        assert bytes_results[0] == b"Happy Birthday!"
+        assert bytes_results[-1] == b"Happy Birthday!"
+
+        write_sd_fs_conn.get_file(sd_file_location, DOWNLOAD_FILE_PATH)
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+        Path("tests/download_test.txt").unlink()
+
+        write_sd_fs_conn.get([sd_file_location], [DOWNLOAD_FILE_PATH])
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+        put_file_path = root_sd_path + "/test2.txt"
+        write_sd_fs_conn.put_file(LOCAL_FILE_PATH, put_file_path)
+
+        put_path_one = root_sd_path + "/sub1/test.txt"
+        put_path_two = root_sd_path + "/sub2/test.txt"
+
+        write_sd_fs_conn.put(
+            [LOCAL_FILE_PATH, LOCAL_FILE_PATH], [put_path_one, put_path_two]
+        )
+
+        put_file_result = write_sd_fs_conn.read_text(
+            put_file_path, encoding=None, errors=None, newline="\n"
+        )
+
+        put_one_result = write_sd_fs_conn.read_text(
+            put_path_one, encoding=None, errors=None, newline="\n"
+        )
+
+        assert put_file_result == put_one_result
+
+        assert write_sd_fs_conn.head(put_path_one) == write_sd_fs_conn.tail(
+            put_path_one
+        )
+        assert write_sd_fs_conn.head(put_path_two) == write_sd_fs_conn.tail(
+            put_path_two
+        )
+
+        write_sd_fs_conn.cp_file(sd_file_location, root_sd_path + "/test3.txt")
+
+        result_3 = write_sd_fs_conn.read_text(
+            root_sd_path + "/test3.txt", encoding=None, errors=None, newline="\n"
+        )
+
+        logger.info(f"------------ read text3.txt content: {result_3}")
+        assert result_3 == "Happy Birthday!"
+
+        write_sd_fs_conn.copy(
+            [root_sd_path + "/test3.txt"], [root_sd_path + "/test4.txt"]
+        )
+
+        result_4 = write_sd_fs_conn.read_text(
+            root_sd_path + "/test4.txt", encoding=None, errors=None, newline="\n"
+        )
+        assert result_3 == result_4
+
+        write_sd_fs_conn.mv(sd_file_location, root_sd_path + "/new_location/test.txt")
+
+        result = write_sd_fs_conn.read_text(
+            root_sd_path + "/new_location/test.txt",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+        logger.info(f"------------ read /new_location/test.txt content: {result}")
+        assert result == result_3 == result_4
+
+        write_sd_fs_conn.rm_file(root_sd_path + "/test3.txt")
+        write_sd_fs_conn._rm(root_sd_path + "/test4.txt")
+        write_sd_fs_conn.rm(
+            [root_sd_path + "/sub1/test.txt", root_sd_path + "/sub2/test.txt"]
+        )
+
+        # ----------------------------------------------------
+
+        test3_is_file = write_sd_fs_conn.isfile(root_sd_path + "/test3.txt")
+        is_file = write_sd_fs_conn.isfile(root_sd_path + "/sub1/test.txt")
+        logger.info(f"------------ is test3.txt a file?: {is_file}")
+        assert not test3_is_file
+        assert not is_file
+
+        res = write_sd_fs_conn.glob(root_sd_path + "/**")
+        assert len(res) == 3
+
+        # ----------------------------------------------------
+
+        assert write_sd_fs_conn._open(root_sd_path + "/new_location/test.txt").info()
+        assert write_sd_fs_conn.open(root_sd_path + "/new_location/test.txt").info()
+
+        write_sd_fs_conn.touch(root_sd_path + "touch.txt")
+        assert write_sd_fs_conn.ukey(root_sd_path + "touch.txt")
+
+        assert isinstance(
+            write_sd_fs_conn.created(root_sd_path + "touch.txt"), datetime
+        )
+        assert isinstance(
+            write_sd_fs_conn.modified(root_sd_path + "/new_location/test.txt"), datetime
+        )
+
+    def test_sd_file_system_upload(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 1 'upload objects'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        put_file_path = root_sd_path + "/test.txt"
+        write_sd_fs_conn.put_file(LOCAL_FILE_PATH, put_file_path, None)
+
+        put_path_one = root_sd_path + "/sub1/test.txt"
+        put_path_two = root_sd_path + "/sub2/test.txt"
+
+        write_sd_fs_conn.put(
+            [LOCAL_FILE_PATH, LOCAL_FILE_PATH], [put_path_one, put_path_two]
+        )
+
+        # alias of put
+        upload_path = root_sd_path + "/test2.txt"
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            upload_path,
+        )
+
+        # works only on one level
+        result_list = write_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 4
+        assert result_list[0] in root_sd_path + "/sub1"
+        assert result_list[1] in root_sd_path + "/sub2"
+        assert result_list[2] in put_file_path
+        assert result_list[-1] in upload_path
+
+        result_list = write_sd_fs_conn.find(root_sd_path)
+        assert len(result_list) == 4
+        assert result_list[0] in put_path_one
+        assert result_list[1] in put_path_two
+        assert result_list[2] in put_file_path
+        assert result_list[-1] in upload_path
+
+    def test_sd_file_system_crd(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 2 'upload many, delete some and list'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test2.txt",
+        )
+
+        result_list = write_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 2
+        assert "/test.txt" in result_list[0]
+        assert "/test2.txt" in result_list[-1]
+
+        write_sd_fs_conn.rm_file(root_sd_path + "/test.txt")
+
+        result_list = write_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 1
+        assert "/test2.txt" in result_list[-1]
+
+    def test_sd_file_system_upload_and_download(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 3 'upload and download'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_sd_fs_conn.download(root_sd_path + "/test.txt", DOWNLOAD_FILE_PATH)
+
+        result = write_sd_fs_conn.read_text(
+            root_sd_path + "/test.txt", encoding=None, errors=None, newline="\n"
+        )
+
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+    def test_update_dataset_metadata(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Testing update dataset metadata impl.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "new_location/test.txt",
+        )
+
+        write_sd_fs_conn.update_dataset_metadata()
+        metadata = write_sd_fs_conn._get_seismic_dms_client().get_dataset_metadata(True)
+
+        logger.info("Added 2 files with common size 30 bytes")
+
+        assert isinstance(metadata, dict)
+        assert "type" in metadata
+        assert metadata["type"] == "GENERIC"
+
+        assert "nobjects" in metadata
+        assert metadata["nobjects"] == 2
+
+        assert "size" in metadata
+        assert metadata["size"] == 30
+
+        write_sd_fs_conn.pipe_file(
+            root_sd_path + "new_location/test.txt", bytes("New content", "utf-8")
+        )
+
+        write_sd_fs_conn.update_dataset_metadata()
+        metadata = write_sd_fs_conn._get_seismic_dms_client().get_dataset_metadata(True)
+
+        logger.info("File content updated")
+
+        assert "size" in metadata
+        assert metadata["size"] == 26
+
+        write_sd_fs_conn.rm_file(root_sd_path + "/test.txt")
+
+        write_sd_fs_conn.update_dataset_metadata()
+        metadata = write_sd_fs_conn._get_seismic_dms_client().get_dataset_metadata(True)
+
+        logger.info("File removed")
+
+        assert "nobjects" in metadata
+        assert metadata["nobjects"] == 1
+
+        assert "size" in metadata
+        assert metadata["size"] == 11
+
+    def test_returns_dictionary(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test audit method.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test2.txt",
+        )
+        result = write_sd_fs_conn.dataset_audit()
+
+        assert isinstance(result, dict)
+        assert "nobjects" in result
+        assert result["nobjects"] == 2
+
+        assert "size" in result
+        assert result["size"] == 30
diff --git a/tests/integration/test_seismic_dms_client_integration.py b/tests/integration/test_seismic_dms_client_integration.py
new file mode 100644
index 0000000..b2cacb1
--- /dev/null
+++ b/tests/integration/test_seismic_dms_client_integration.py
@@ -0,0 +1,128 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestSeismicDmsClientIntegration."""
+import os
+
+import pytest
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+from sdfs.exceptions import AuthStrategyError
+
+# osdu auth
+AUTH_REFRESH_TOKEN_URL = os.environ.get(
+    "AUTH_REFRESH_TOKEN_URL", "https://oauth2.googleapis.com/token"
+)
+ACCESS_TOKEN = os.environ.get("ACCESS_TOKEN", None)
+REFRESH_TOKEN = os.environ.get("REFRESH_TOKEN", None)
+CLIENT_ID = os.environ.get("CLIENT_ID", None)
+CLIENT_SECRET = os.environ.get("CLIENT_SECRET", None)
+# seismic service
+SD_PATH = os.environ.get(
+    "SD_PATH",
+    "sd://osdu/osdu-mdio/autotest_path/integration",
+)
+SEISMIC_STORE_URL = os.environ.get(
+    "SEISMIC_STORE_URL",
+    "https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3",
+)
+
+
+class TestSeismicDmsClientIntegration:
+    """TestSeismicDmsClientIntegration."""
+
+    def test_access_token_use_case(self) -> None:
+        """Validate access token use case.
+
+        As an OSDU user, having a freshly generated access_token
+        and an sdpath pointing to an existing seismic dataset.
+        I create an instance of SeismicDmsClient with the following args
+        (sdpath, seismic_dms_url, access_token, write_access=False).
+        """
+        assert SD_PATH
+        assert SEISMIC_STORE_URL
+        assert ACCESS_TOKEN
+
+        seismic_dms_client = SeismicDmsClient(
+            sdpath=SD_PATH,
+            seismic_dms_url=SEISMIC_STORE_URL,
+            access_token=ACCESS_TOKEN,
+        )
+
+        assert seismic_dms_client._dataset_properties["gcsurl"]
+
+        downscoped_credentials = seismic_dms_client.get_downscoped_credentials(False)
+        assert downscoped_credentials["access_token"]
+
+        with pytest.raises(
+            AuthStrategyError, match="This auth strategy don't support this method"
+        ):
+            seismic_dms_client.refresh_token()
+
+    def test_refresh_token_use_case(self) -> None:
+        """Validate refresh token use case.
+
+        As an OSDU user, having a freshly generated refresh_token
+        and an sdpath pointing to an existing seismic dataset.
+        I create an instance of SeismicDmsClient with the following args
+        (
+            sdpath,
+            seismic_dms_url,
+            refresh_url,
+            refresh_token,
+            client_id,
+            client_secret,
+            write_access=False
+        )
+        """
+        seismic_dms_client = SeismicDmsClient(
+            sdpath=SD_PATH,
+            seismic_dms_url=SEISMIC_STORE_URL,
+            refresh_url=AUTH_REFRESH_TOKEN_URL,
+            refresh_token=REFRESH_TOKEN,
+            client_id=CLIENT_ID,
+            client_secret=CLIENT_SECRET,
+        )
+
+        dataset_properties = seismic_dms_client.get_dataset_properties(False)
+        assert dataset_properties["gcsurl"]
+
+        downscoped_credentials = seismic_dms_client.get_downscoped_credentials(False)
+        assert downscoped_credentials["access_token"]
+
+        assert seismic_dms_client._access_token != seismic_dms_client.refresh_token()
+
+    def test_dataset_metadata_management_use_case(self, root_sd_path: str) -> None:
+        """Testing patch dataset metadata."""
+        seismic_dms_client = SeismicDmsClient(
+            sdpath=root_sd_path,
+            seismic_dms_url=SEISMIC_STORE_URL,
+            refresh_url=AUTH_REFRESH_TOKEN_URL,
+            refresh_token=REFRESH_TOKEN,
+            client_id=CLIENT_ID,
+            client_secret=CLIENT_SECRET,
+        )
+
+        seismic_dms_client.patch_dataset_metadata({"test": "test"})
+
+        dataset_metadata = seismic_dms_client.get_dataset_metadata(True)
+        assert "test" in dataset_metadata
+
+        seismic_dms_client.patch_dataset_metadata({"type": "GENERIC"})
+
+        dataset_metadata = seismic_dms_client.get_dataset_metadata(True)
+        assert "type" in dataset_metadata
+        assert dataset_metadata["type"] == "GENERIC"
diff --git a/tests/integration/test_single_file_sd_integration.py b/tests/integration/test_single_file_sd_integration.py
new file mode 100644
index 0000000..6ebbfdd
--- /dev/null
+++ b/tests/integration/test_single_file_sd_integration.py
@@ -0,0 +1,552 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestSDFileSystemIntegration."""
+import json
+import logging
+import os
+from datetime import datetime
+from pathlib import Path
+
+import pytest
+from sdfs import SDFileSystem
+
+TEST_SD_PATH = os.environ.get("SD_PATH", "")
+LOCAL_FILE_PATH = "tests/local_test.txt"
+DOWNLOAD_FILE_PATH = "tests/download_test.txt"
+
+logger = logging.getLogger(__name__)
+
+
+class TestSDFileSystemIntegration:
+    """TestSDFileSystemIntegration."""
+
+    def test_sd_file_system_exist(self, write_single_sd_fs_conn: SDFileSystem) -> None:
+        """Test that the SD file system exists in this exec context.
+
+        Args:
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        assert write_single_sd_fs_conn
+        sd_json = write_single_sd_fs_conn.to_json()
+        assert isinstance(write_single_sd_fs_conn.from_json(sd_json), SDFileSystem)
+
+        sd_options = json.loads(sd_json)
+        assert sd_options["cls"] == "sdfs.core.SDFileSystem"
+        assert sd_options["protocol"] == "sd"
+        assert TEST_SD_PATH in sd_options["sdpath"]
+
+    def test_transaction_management(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test transaction management.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        logger.info("------------ start transaction:")
+        write_single_sd_fs_conn.start_transaction()
+        logger.info(f"active?: {write_single_sd_fs_conn.transaction.fs._intrans}")
+        assert write_single_sd_fs_conn.transaction.fs._intrans
+
+        write_single_sd_fs_conn.write_text(
+            root_sd_path + "/test.txt",
+            "I love you!",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+
+        logger.info("------------ before end transaction:")
+        logger.info(f"files: {write_single_sd_fs_conn.transaction.files}")
+
+        assert len(write_single_sd_fs_conn.transaction.files) == 1
+        assert "test.txt" in str(write_single_sd_fs_conn.transaction.files)
+
+        logger.info("------------ end transaction:")
+        write_single_sd_fs_conn.end_transaction()
+        logger.info(f"active?: {write_single_sd_fs_conn.transaction.fs._intrans}")
+        assert not write_single_sd_fs_conn.transaction.fs._intrans
+
+        logger.info("------------ text.txt some info:")
+        current_size = write_single_sd_fs_conn.info(root_sd_path + "/test.txt")["size"]
+        logger.info(f"size: {current_size}")
+        assert current_size == 11
+
+        result = write_single_sd_fs_conn.read_text(
+            root_sd_path + "/test.txt", encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "I love you!"
+
+    def test_dir_management(
+        self, root_sd_path: str, write_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test dir management.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        location = root_sd_path + "/new_location/test.txt"
+
+        write_sd_fs_conn.upload(LOCAL_FILE_PATH, location, None)
+
+        with pytest.raises(OSError, match="Forbidden"):
+            write_sd_fs_conn.mkdir(root_sd_path + "/test_location/gc/v2")
+
+        assert write_sd_fs_conn.isdir(root_sd_path + "/new_location")
+        object_type = write_sd_fs_conn.info(root_sd_path + "/new_location")["type"]
+        logger.info(f"------------ /test_location type: {object_type}")
+        assert object_type == "directory"
+
+        logger.info("------------ removing text file")
+        write_sd_fs_conn.rm_file(location)
+
+        assert not write_sd_fs_conn.isdir(root_sd_path + "/new_location")
+
+    def test_sd_file_system_interface(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test SD file system interface.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        location = root_sd_path + "/new_location/test.txt"
+
+        write_single_sd_fs_conn.upload(LOCAL_FILE_PATH, location, None)
+
+        result = write_single_sd_fs_conn.read_text(
+            location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "Hello From SDFS"
+
+        result_list = write_single_sd_fs_conn.ls(root_sd_path)
+        assert len(result_list) == 1
+        assert "tenant" in result_list[0]
+
+        result_list = write_single_sd_fs_conn.ls(root_sd_path)
+
+        assert not write_single_sd_fs_conn._ls_from_cache(root_sd_path)
+
+        for idx, data in enumerate(write_single_sd_fs_conn.walk(location)):
+            assert idx < 1
+            [path, dirs, files] = data
+
+            assert "new_location" in path
+            assert len(dirs) == 0
+            assert len(files) == 1
+
+            logger.info(f"------------ walk {location}: {path}, {dirs}, {files}")
+
+        assert write_single_sd_fs_conn.find(root_sd_path, 1) != write_single_sd_fs_conn.ls(
+            root_sd_path, False
+        )
+
+        assert write_single_sd_fs_conn.find(location) == write_single_sd_fs_conn.ls(location, False)
+
+        du = write_single_sd_fs_conn.du(root_sd_path + "/new_location/test.txt")
+        assert du == 15
+
+        size_details = write_single_sd_fs_conn.du(
+            root_sd_path + "/new_location", False, 2, True
+        )
+        assert len(size_details) == 2
+        result_location = list(size_details.keys())[-1]
+        assert "sd://" not in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_tenant in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_subproject in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_dataset_name in result_location
+
+        logger.info(f"------------ /new_location/test.txt du: {du}")
+
+        glob_res = write_single_sd_fs_conn.glob(root_sd_path + "/new_loc*")
+        assert len(glob_res) == 1
+        glob_res = write_single_sd_fs_conn.glob(root_sd_path + "/**")
+        assert len(glob_res) == 2
+
+        result_location = glob_res[-1]
+        assert "sd://" not in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_tenant in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_subproject in result_location
+        assert write_single_sd_fs_conn._seismic_dms_client._sd_dataset_name in result_location
+
+        logger.info(f"------------ /new_loc** glob: {glob_res}")
+
+        assert write_single_sd_fs_conn.exists(location)
+        assert not write_single_sd_fs_conn.exists(root_sd_path + "/test.txt")
+        assert write_single_sd_fs_conn.lexists(location)
+
+        assert write_single_sd_fs_conn.info(location)["type"] == "segmented_file"
+
+        checksum = write_single_sd_fs_conn.checksum(location)
+        logger.info(f"------------ test.txt checksum: {checksum}")
+        assert checksum
+
+        size = write_single_sd_fs_conn.size(location)
+        logger.info(f"------------ test.txt size: {size}")
+
+        sizes = write_single_sd_fs_conn.sizes([location, location])
+        assert sizes == [15, 15]
+
+        expand_path_res = write_single_sd_fs_conn.expand_path(
+            [root_sd_path + "/*"], recursive=True, maxdepth=2
+        )
+        assert len(expand_path_res) == 2
+        assert expand_path_res == glob_res
+
+    def test_file_management(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test file management.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        sd_file_location = root_sd_path + "/test.txt"
+
+        write_single_sd_fs_conn.write_text(
+            sd_file_location,
+            "Hello World!",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+
+        result = write_single_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(f"------------ read text.txt content: {result}")
+        assert result == "Hello World!"
+
+        assert write_single_sd_fs_conn.read_block(sd_file_location, 6, 5) == b"World"
+
+        write_single_sd_fs_conn.pipe_file(sd_file_location, bytes("Happy New Year!", "utf-8"))
+        result = write_single_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        assert result == "Happy New Year!"
+
+        write_single_sd_fs_conn.pipe({sd_file_location: bytes("Happy Birthday!", "utf-8")})
+        result = write_single_sd_fs_conn.read_text(
+            sd_file_location, encoding=None, errors=None, newline="\n"
+        )
+        logger.info(
+            f"------------ rewrite (pipe) existing text.txt with new bytes: {result}"
+        )
+        assert result == "Happy Birthday!"
+
+        bytes_result = write_single_sd_fs_conn.cat_file(sd_file_location, start=0, end=5)
+        logger.info(
+            f"------------ cat text.txt content from byte 1 to 5: {bytes_result}"
+        )
+        assert bytes_result == b"Happy"
+
+        bytes_results = write_single_sd_fs_conn.cat_ranges(
+            paths=[sd_file_location, sd_file_location],
+            starts=[0, 6],
+            ends=[5, 14],
+        )
+        assert bytes_results[0] == b"Happy"
+        assert bytes_results[-1] == b"Birthday"
+
+        bytes_results = write_single_sd_fs_conn.cat([sd_file_location, sd_file_location])
+
+        bytes_results = [value for _key, value in bytes_results.items()]
+        assert bytes_results[0] == b"Happy Birthday!"
+        assert bytes_results[-1] == b"Happy Birthday!"
+
+        write_single_sd_fs_conn.get_file(sd_file_location, DOWNLOAD_FILE_PATH)
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+        Path("tests/download_test.txt").unlink()
+
+        write_single_sd_fs_conn.get([sd_file_location], [DOWNLOAD_FILE_PATH])
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+        put_file_path = root_sd_path + "/test2.txt"
+        write_single_sd_fs_conn.put_file(LOCAL_FILE_PATH, put_file_path)
+
+        put_path_one = root_sd_path + "/sub1/test.txt"
+        put_path_two = root_sd_path + "/sub2/test.txt"
+
+        write_single_sd_fs_conn.put(
+            [LOCAL_FILE_PATH, LOCAL_FILE_PATH], [put_path_one, put_path_two]
+        )
+
+        put_file_result = write_single_sd_fs_conn.read_text(
+            put_file_path, encoding=None, errors=None, newline="\n"
+        )
+
+        put_one_result = write_single_sd_fs_conn.read_text(
+            put_path_one, encoding=None, errors=None, newline="\n"
+        )
+
+        assert put_file_result == put_one_result
+
+        assert write_single_sd_fs_conn.head(put_path_one) == write_single_sd_fs_conn.tail(
+            put_path_one
+        )
+        assert write_single_sd_fs_conn.head(put_path_two) == write_single_sd_fs_conn.tail(
+            put_path_two
+        )
+
+        write_single_sd_fs_conn.cp_file(sd_file_location, root_sd_path + "/test3.txt")
+
+        result_3 = write_single_sd_fs_conn.read_text(
+            root_sd_path + "/test3.txt", encoding=None, errors=None, newline="\n"
+        )
+
+        logger.info(f"------------ read text3.txt content: {result_3}")
+        assert result_3 == "Happy Birthday!"
+
+        write_single_sd_fs_conn.copy(
+            [root_sd_path + "/test3.txt"], [root_sd_path + "/test4.txt"]
+        )
+
+        result_4 = write_single_sd_fs_conn.read_text(
+            root_sd_path + "/test4.txt", encoding=None, errors=None, newline="\n"
+        )
+        assert result_3 == result_4
+
+        write_single_sd_fs_conn.mv(sd_file_location, root_sd_path + "/new_location/test.txt")
+
+        result = write_single_sd_fs_conn.read_text(
+            root_sd_path + "/new_location/test.txt",
+            encoding=None,
+            errors=None,
+            newline="\n",
+        )
+        logger.info(f"------------ read /new_location/test.txt content: {result}")
+        assert result == result_3 == result_4
+
+        write_single_sd_fs_conn.rm_file(root_sd_path + "/test3.txt")
+        write_single_sd_fs_conn._rm(root_sd_path + "/test4.txt")
+        write_single_sd_fs_conn.rm(
+            [root_sd_path + "/sub1/test.txt", root_sd_path + "/sub2/test.txt"]
+        )
+
+        # ----------------------------------------------------
+
+        test3_is_file = write_single_sd_fs_conn.isfile(root_sd_path + "/test3.txt")
+        is_file = write_single_sd_fs_conn.isfile(root_sd_path + "/sub1/test.txt")
+        logger.info(f"------------ is test3.txt a file?: {is_file}")
+        assert not test3_is_file
+        assert not is_file
+
+        res = write_single_sd_fs_conn.glob(root_sd_path + "/**")
+        assert len(res) == 3
+
+        # ----------------------------------------------------
+
+        assert write_single_sd_fs_conn._open(root_sd_path + "/new_location/test.txt").info()
+        assert write_single_sd_fs_conn.open(root_sd_path + "/new_location/test.txt").info()
+
+        write_single_sd_fs_conn.touch(root_sd_path + "touch.txt")
+        assert write_single_sd_fs_conn.ukey(root_sd_path + "touch.txt")
+
+        assert isinstance(
+            write_single_sd_fs_conn.created(root_sd_path + "touch.txt"), datetime
+        )
+        assert isinstance(
+            write_single_sd_fs_conn.modified(root_sd_path + "/new_location/test.txt"), datetime
+        )
+
+    def test_sd_file_system_upload(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 1 'upload objects'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        put_file_path = root_sd_path + "/test.txt"
+        write_single_sd_fs_conn.put_file(LOCAL_FILE_PATH, put_file_path, None)
+
+        put_path_one = root_sd_path + "/sub1/test.txt"
+        put_path_two = root_sd_path + "/sub2/test.txt"
+
+        write_single_sd_fs_conn.put(
+            [LOCAL_FILE_PATH, LOCAL_FILE_PATH], [put_path_one, put_path_two]
+        )
+
+        # alias of put
+        upload_path = root_sd_path + "/test2.txt"
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            upload_path,
+        )
+
+        # works only on one level
+        result_list = write_single_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 4
+        assert result_list[0] in root_sd_path + "/sub1"
+        assert result_list[1] in root_sd_path + "/sub2"
+        assert result_list[2] in put_file_path
+        assert result_list[-1] in upload_path
+
+        result_list = write_single_sd_fs_conn.find(root_sd_path)
+        assert len(result_list) == 4
+        assert result_list[0] in put_path_one
+        assert result_list[1] in put_path_two
+        assert result_list[2] in put_file_path
+        assert result_list[-1] in upload_path
+
+    def test_sd_file_system_crd(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 2 'upload many, delete some and list'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test2.txt",
+        )
+
+        result_list = write_single_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 2
+        assert "/test.txt" in result_list[0]
+        assert "/test2.txt" in result_list[-1]
+
+        write_single_sd_fs_conn.rm_file(root_sd_path + "/test.txt")
+
+        result_list = write_single_sd_fs_conn.ls(root_sd_path, False)
+        assert len(result_list) == 1
+        assert "/test2.txt" in result_list[-1]
+
+    def test_sd_file_system_upload_and_download(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Use Case 3 'upload and download'.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_single_sd_fs_conn.download(root_sd_path + "/test.txt", DOWNLOAD_FILE_PATH)
+
+        result = write_single_sd_fs_conn.read_text(
+            root_sd_path + "/test.txt", encoding=None, errors=None, newline="\n"
+        )
+
+        with Path(DOWNLOAD_FILE_PATH).open() as f:
+            assert f.read() == result
+
+    def test_update_dataset_metadata(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Testing update dataset metadata impl.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "new_location/test.txt",
+        )
+
+        write_single_sd_fs_conn.update_dataset_metadata()
+        metadata = write_single_sd_fs_conn._get_seismic_dms_client(
+        ).get_dataset_metadata(True)
+
+        logger.info("Added 2 files with common size 30 bytes")
+
+        assert isinstance(metadata, dict)
+        assert "type" in metadata
+        assert metadata["type"] == "GENERIC"
+
+        assert "nobjects" in metadata
+        assert metadata["nobjects"] == 2
+
+        assert "size" in metadata
+        assert metadata["size"] == 30
+
+        write_single_sd_fs_conn.pipe_file(
+            root_sd_path + "new_location/test.txt", bytes("New content", "utf-8")
+        )
+
+        write_single_sd_fs_conn.update_dataset_metadata()
+        metadata = write_single_sd_fs_conn._get_seismic_dms_client().get_dataset_metadata(True)
+
+        logger.info("File content updated")
+
+        assert "size" in metadata
+        assert metadata["size"] == 26
+
+        write_single_sd_fs_conn.rm_file(root_sd_path + "/test.txt")
+
+        write_single_sd_fs_conn.update_dataset_metadata()
+        metadata = write_single_sd_fs_conn._get_seismic_dms_client().get_dataset_metadata(True)
+
+        logger.info("File removed")
+
+        assert "nobjects" in metadata
+        assert metadata["nobjects"] == 1
+
+        assert "size" in metadata
+        assert metadata["size"] == 11
+
+    def test_returns_dictionary(
+        self, root_sd_path: str, write_single_sd_fs_conn: SDFileSystem
+    ) -> None:
+        """Test audit method.
+
+        Args:
+            root_sd_path (str): root sd dataset url
+            write_single_sd_fs_conn (SDFileSystem): SDFileSystem configured to write access
+        """
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test.txt",
+        )
+        write_single_sd_fs_conn.upload(
+            LOCAL_FILE_PATH,
+            root_sd_path + "/test2.txt",
+        )
+        result = write_single_sd_fs_conn.dataset_audit()
+
+        assert isinstance(result, dict)
+        assert "nobjects" in result
+        assert result["nobjects"] == 2
+
+        assert "size" in result
+        assert result["size"] == 30
diff --git a/tests/local_test.txt b/tests/local_test.txt
new file mode 100644
index 0000000..3710513
--- /dev/null
+++ b/tests/local_test.txt
@@ -0,0 +1 @@
+Hello From SDFS
\ No newline at end of file
diff --git a/tests/test_retry_flow.py b/tests/test_retry_flow.py
new file mode 100644
index 0000000..8ba3ae5
--- /dev/null
+++ b/tests/test_retry_flow.py
@@ -0,0 +1,102 @@
"""
* Copyright 2023-2024, TGS
* Copyright 2023-2024, EPAM
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
"""

"""Retry flow integration test.

Runs as a long-lived script: it periodically issues a cheap ``find`` call
and waits until the client has refreshed both the access token and the
downscoped credentials more than once, proving the retry flow survives
token expiry without an unhandled exception.
"""
import logging
import os
import time

from sdfs import SDFileSystem

logger = logging.getLogger(__name__)

# osdu auth
AUTH_REFRESH_TOKEN_URL = os.environ.get(
    "AUTH_REFRESH_TOKEN_URL", "https://oauth2.googleapis.com/token"
)
ACCESS_TOKEN = os.environ.get("ACCESS_TOKEN", None)
REFRESH_TOKEN = os.environ.get("REFRESH_TOKEN", None)
CLIENT_ID = os.environ.get("CLIENT_ID", None)
CLIENT_SECRET = os.environ.get("CLIENT_SECRET", None)
# seismic service
SD_PATH = os.environ.get(
    "SD_PATH",
    "sd://osdu/osdu-mdio/autotest_path/integration",
)
SEISMIC_STORE_URL = os.environ.get(
    "SEISMIC_STORE_URL",
    "https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3",
)

# timing data
MAX_MINUTES_COUNT = 180  # total budget for the whole retry flow, minutes
STEP = 10  # pause between probes, minutes

# Validate configuration explicitly instead of with `assert`, which is
# silently stripped when Python runs with -O.
_required = {
    "SEISMIC_STORE_URL": SEISMIC_STORE_URL,
    "REFRESH_TOKEN": REFRESH_TOKEN,
    "AUTH_REFRESH_TOKEN_URL": AUTH_REFRESH_TOKEN_URL,
    "CLIENT_ID": CLIENT_ID,
    "CLIENT_SECRET": CLIENT_SECRET,
}
_missing = [name for name, value in _required.items() if not value]
if _missing:
    raise RuntimeError(
        f"Missing required environment variables: {', '.join(_missing)}"
    )

sdfs = SDFileSystem(
    sdpath=SD_PATH,
    seismic_dms_url=SEISMIC_STORE_URL,
    write_access=True,
    refresh_token=REFRESH_TOKEN,
    refresh_url=AUTH_REFRESH_TOKEN_URL,
    client_id=CLIENT_ID,
    client_secret=CLIENT_SECRET,
)


def _retry_stats() -> dict:
    """Return the seismic DMS client's session statistics counters."""
    return sdfs._get_seismic_dms_client()._session_statistics


def _both_refreshed(stats: dict) -> bool:
    """Return True once both refresh paths have fired more than once.

    Args:
        stats (dict): session statistics counters from the client.

    Returns:
        bool: True when both ``access_token_refreshes`` and
        ``downscoped_credentials_updates`` exceed 1.
    """
    return (
        stats["access_token_refreshes"] > 1
        and stats["downscoped_credentials_updates"] > 1
    )


def _summary(stats: dict) -> str:
    """Format the retry counters for human-readable reporting."""
    return (
        f"access_token_refreshes - {stats['access_token_refreshes']}, "
        f"downscoped_credentials_updates - "
        f"{stats['downscoped_credentials_updates']}"
    )


if __name__ == "__main__":
    idx = 0  # elapsed time, seconds
    while idx < 60 * MAX_MINUTES_COUNT:
        if _both_refreshed(_retry_stats()):
            break

        # Cheap probe: a find() on a non-existing directory still exercises
        # the authenticated request path (and thus the token refresh logic).
        result = sdfs.find(SD_PATH + "/non_existing_directory", 1)

        print(f"step index: {idx}, find: {result}")
        time.sleep(60 * STEP)
        idx += 60 * STEP

    stats = _retry_stats()
    if not _both_refreshed(stats):
        raise AssertionError(
            f"Retry flow failed in {idx / 60} minutes due to a non-handled"
            f" exception. Completed retries are: {_summary(stats)}"
        )

    print(
        f"Test successfully finished in {idx / 60} minutes."
        f" Completed retries: {_summary(stats)}"
    )
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..17b9af9
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,18 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Unit tests for the SDFS package."""
diff --git a/tests/unit/providers/__init__.py b/tests/unit/providers/__init__.py
new file mode 100644
index 0000000..1476c6a
--- /dev/null
+++ b/tests/unit/providers/__init__.py
@@ -0,0 +1,18 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Unit tests for providers package."""
diff --git a/tests/unit/providers/conftest.py b/tests/unit/providers/conftest.py
new file mode 100644
index 0000000..fa5f000
--- /dev/null
+++ b/tests/unit/providers/conftest.py
@@ -0,0 +1,41 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Providers tests shared fixtures."""
+from typing import Generator
+from unittest import mock
+
+import pytest
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+
+from tests.unit.seismic_dms_client_mocks import mock_osdu_request
+
+MOCK_ACCESS_TOKEN = "MOCK_ACCESS_TOKEN"  # noqa: S105
+MOCK_SEISMIC_STORE_URL = "https://mdio.test/api/seismic-store/v3"
+MOCK_SD_PATH = "sd://osdu/osdu-mdio/dataset-1-segy/ST0202R08-PS_PSDM_FULL_OFFSET_DEPTH.MIG_FIN.POST_Stack.3D.JS-017534.segy"  # noqa: B950
+
+
@pytest.fixture()
def seismic_dms_client() -> Generator:
    """Yield a SeismicDmsClient whose HTTP calls hit the mocked OSDU API.

    ``requests.request`` is patched for the client's whole lifetime, so
    construction and any later calls are served by ``mock_osdu_request``.
    """
    with mock.patch("requests.request", side_effect=mock_osdu_request):
        client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )
        yield client
diff --git a/tests/unit/providers/test_factory.py b/tests/unit/providers/test_factory.py
new file mode 100644
index 0000000..1f9f529
--- /dev/null
+++ b/tests/unit/providers/test_factory.py
@@ -0,0 +1,105 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestProviderSpecificFactory."""
+from unittest import mock
+
+import pytest
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+from sdfs.exceptions import ServiceProviderCodeNotSpecifiedError, ValidationError
+from sdfs.providers import (
+    AnthosSpecific,
+    AwsSpecific,
+    AzureSpecific,
+    GoogleSpecific,
+    IbmSpecific,
+    ProviderSpecificFactory,
+)
+
+
class TestProviderSpecificFactory:
    """Tests for ProviderSpecificFactory construction and provider lookup."""

    def test_wrong_factory_initialization(self) -> None:
        """Factory construction must fail fast without a seismic DMS client."""
        with pytest.raises(ValidationError, match="seismic_dms_client is required"):
            ProviderSpecificFactory(seismic_dms_client=None)

    @mock.patch(
        "sdfs.clients.seismic_dms_client.SeismicDmsClient.get_service_provider_code"
    )
    def test_retrieve_cloud_provider_without_a_code(
        self, get_service_provider_code: mock.Mock, seismic_dms_client: SeismicDmsClient
    ) -> None:
        """A missing provider code must raise ServiceProviderCodeNotSpecifiedError."""
        get_service_provider_code.return_value = None
        factory = ProviderSpecificFactory(seismic_dms_client)

        with pytest.raises(
            ServiceProviderCodeNotSpecifiedError,
            match="Service provider code was not specified",
        ):
            factory.get_provider_specific()

    @mock.patch(
        "sdfs.clients.seismic_dms_client.SeismicDmsClient.get_service_provider_code"
    )
    def test_retrieve_cloud_provider_with_invalid_code(
        self, get_service_provider_code: mock.Mock, seismic_dms_client: SeismicDmsClient
    ) -> None:
        """An unknown provider code must raise a ValidationError naming it."""
        code = "wrong_code"
        get_service_provider_code.return_value = code
        factory = ProviderSpecificFactory(seismic_dms_client)

        with pytest.raises(ValidationError, match=f"{code} is not valid"):
            factory.get_provider_specific()

    @mock.patch(
        "sdfs.clients.seismic_dms_client.SeismicDmsClient.get_service_provider_code"
    )
    def test_retrieve_cloud_provider_with_valid_code(
        self, get_service_provider_code: mock.Mock, seismic_dms_client: SeismicDmsClient
    ) -> None:
        """Every supported provider code must map to its provider class."""
        # One mapping instead of two parallel lists keeps code and class paired.
        code_to_class = {
            "gc": GoogleSpecific,
            "aws": AwsSpecific,
            "azure": AzureSpecific,
            "ibm": IbmSpecific,
            "anthos": AnthosSpecific,
        }
        factory = ProviderSpecificFactory(seismic_dms_client)

        for code, expected_class in code_to_class.items():
            get_service_provider_code.return_value = code
            assert isinstance(factory.get_provider_specific(), expected_class)
diff --git a/tests/unit/providers/test_google_provider.py b/tests/unit/providers/test_google_provider.py
new file mode 100644
index 0000000..08a146e
--- /dev/null
+++ b/tests/unit/providers/test_google_provider.py
@@ -0,0 +1,83 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestGoogleSpecific."""
+from pathlib import Path
+from unittest import mock
+
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+from sdfs.providers import ProviderSpecificFactory
+
+from tests.unit.seismic_dms_client_mocks import (
+    MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA,
+    MOCK_DATASET_INFO_RESPONSE_DATA,
+)
+
+MOCK_SD_PATH = "sd://osdu/osdu-mdio/dataset-1-segy/ST0202R08-PS_PSDM_FULL_OFFSET_DEPTH.MIG_FIN.POST_Stack.3D.JS-017534.segy"  # noqa: B950
+
+
class TestGoogleSpecific:
    """Tests for the Google Cloud provider-specific adapter."""

    @mock.patch(
        "sdfs.clients.seismic_dms_client.SeismicDmsClient.get_service_provider_code"
    )
    def test_get_storage_options(
        self, get_service_provider_code: mock.Mock, seismic_dms_client: SeismicDmsClient
    ) -> None:
        """Storage options must expose the downscoped token under ``token``."""
        get_service_provider_code.return_value = "gc"

        provider = ProviderSpecificFactory(seismic_dms_client).get_provider_specific()

        expected_token = MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA[
            "access_token"
        ]
        assert provider.get_storage_options() == {"token": expected_token}

    @mock.patch(
        "sdfs.clients.seismic_dms_client.SeismicDmsClient.get_service_provider_code"
    )
    def test_get_dataset_storage_url(
        self, get_service_provider_code: mock.Mock, seismic_dms_client: SeismicDmsClient
    ) -> None:
        """The dataset url must be the dataset's gcs url prefixed with ``gs://``."""
        get_service_provider_code.return_value = "gc"

        provider = ProviderSpecificFactory(seismic_dms_client).get_provider_specific()

        subpath = "hello_world.txt"
        storage_url = provider.get_dataset_storage_url(
            subpath_from_dataset_root=subpath
        )

        expected_path = Path(
            MOCK_DATASET_INFO_RESPONSE_DATA["gcsurl"], subpath
        ).as_posix()
        assert storage_url == f"gs://{expected_path}"
diff --git a/tests/unit/sd_file_system_mocks.py b/tests/unit/sd_file_system_mocks.py
new file mode 100644
index 0000000..649ec34
--- /dev/null
+++ b/tests/unit/sd_file_system_mocks.py
@@ -0,0 +1,504 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Test MockFinalFileSystem for SDFS package."""
+
+from .singleton import Singleton
+
+
class MockUnAuthorizedError(Exception):
    """Exception carrying an HTTP 401 ``code``, used to simulate auth errors."""

    # HTTP status code the mocked file system reports for the failure.
    code = 401
+
+
class MockFinalFileSystem(metaclass=Singleton):
    """Call-tracking stand-in for an fsspec file system.

    Every operation records its own name in ``_result`` via ``_track``;
    tests inspect those counters to verify which operations the SDFS
    wrapper delegated to the underlying file system. The ``Singleton``
    metaclass keeps one shared instance so counters accumulate across the
    whole test run.
    """

    # Size reported by the fake ``du`` for the single mocked file.
    _default_file_size = 5

    def __init__(self) -> None:
        """Initialize the per-operation call counters."""
        self._result: dict = {}

    def _track(self, key: str) -> None:
        """Increment the call counter for *key*."""
        self._result[key] = self._result.get(key, 0) + 1

    def _strip_protocol(self, path: str) -> str:
        """Drop the leading ``sd://`` protocol from *path*.

        Bug fix: the original used ``removesuffix``, which never matches
        because ``sd://`` is a prefix, so paths came back unchanged.
        """
        return path.removeprefix("sd://")

    @property
    def fsid(self) -> None:
        """Record an ``fsid`` access."""
        self._track("fsid")

    @property
    def transaction(self) -> bool:
        """Record a ``transaction`` access.

        Returns:
            bool: always True, standing in for an active transaction.
        """
        self._track("transaction")
        return True

    def start_transaction(self) -> None:
        """Record a ``start_transaction`` call."""
        self._track("start_transaction")

    def end_transaction(self) -> None:
        """Record an ``end_transaction`` call."""
        self._track("end_transaction")

    def invalidate_cache(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``invalidate_cache`` call; all arguments are ignored."""
        self._track("invalidate_cache")

    def mkdir(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``mkdir`` call; all arguments are ignored."""
        self._track("mkdir")

    def makedirs(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``makedirs`` call; all arguments are ignored."""
        self._track("makedirs")

    def rmdir(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``rmdir`` call; all arguments are ignored."""
        self._track("rmdir")

    def ls(self, *_args: int, **_kwargs: int) -> list:
        """Record an ``ls`` call and return an empty listing."""
        self._track("ls")
        return []

    def _ls_from_cache(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``_ls_from_cache`` call; all arguments are ignored."""
        self._track("_ls_from_cache")

    def walk(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``walk`` call; all arguments are ignored."""
        self._track("walk")

    def find(self, *_args: int, **_kwargs: int) -> list:
        """Record a ``find`` call and return an empty result list."""
        self._track("find")
        return []

    def du(self, *_args: int, **kwargs: int) -> "int | dict":
        """Record a ``du`` call.

        Args:
            *_args: mock args (ignored).
            **kwargs: only ``total`` is honored.

        Returns:
            int | dict: the total size when ``total`` is truthy, otherwise
            a per-file size mapping for the single mocked file.
        """
        self._track("du")
        if kwargs.get("total"):
            return self._default_file_size
        return {"test.txt": self._default_file_size}

    def glob(self, *_args: int, **_kwargs: int) -> list:
        """Record a ``glob`` call and return an empty result list."""
        self._track("glob")
        return []

    def expand_path(self, *_args: int, **_kwargs: int) -> list:
        """Record an ``expand_path`` call and return an empty result list."""
        self._track("expand_path")
        return []

    def exists(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``exists`` call; all arguments are ignored."""
        self._track("exists")

    def lexists(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``lexists`` call; all arguments are ignored."""
        self._track("lexists")

    def info(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``info`` call; all arguments are ignored."""
        self._track("info")

    def checksum(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``checksum`` call; all arguments are ignored."""
        self._track("checksum")

    def size(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``size`` call; all arguments are ignored."""
        self._track("size")

    def sizes(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``sizes`` call; all arguments are ignored."""
        self._track("sizes")

    def isdir(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``isdir`` call; all arguments are ignored."""
        self._track("isdir")

    def isfile(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``isfile`` call; all arguments are ignored."""
        self._track("isfile")

    def write_text(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``write_text`` call; all arguments are ignored."""
        self._track("write_text")

    def read_text(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``read_text`` call; all arguments are ignored."""
        self._track("read_text")

    def read_block(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``read_block`` call; all arguments are ignored."""
        self._track("read_block")

    def pipe_file(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``pipe_file`` call; all arguments are ignored."""
        self._track("pipe_file")

    def pipe(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``pipe`` call; all arguments are ignored."""
        self._track("pipe")

    def cat_file(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``cat_file`` call; all arguments are ignored."""
        self._track("cat_file")

    def cat_ranges(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``cat_ranges`` call; all arguments are ignored."""
        self._track("cat_ranges")

    def cat(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``cat`` call; all arguments are ignored."""
        self._track("cat")

    def get_file(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``get_file`` call; all arguments are ignored."""
        self._track("get_file")

    def get(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``get`` call; all arguments are ignored."""
        self._track("get")

    def put_file(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``put_file`` call; all arguments are ignored."""
        self._track("put_file")

    def put(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``put`` call; all arguments are ignored."""
        self._track("put")

    def head(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``head`` call; all arguments are ignored."""
        self._track("head")

    def tail(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``tail`` call; all arguments are ignored."""
        self._track("tail")

    def cp_file(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``cp_file`` call; all arguments are ignored."""
        self._track("cp_file")

    def copy(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``copy`` call; all arguments are ignored."""
        self._track("copy")

    def mv(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``mv`` call; all arguments are ignored."""
        self._track("mv")

    def rm_file(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``rm_file`` call.

        Raises:
            MockUnAuthorizedError: until the file system has been obtained
                with ``force`` (``force_get_final_fs`` tracked), simulating
                an authorization failure (401) on the first attempt.
        """
        if "force_get_final_fs" not in self._result:
            raise MockUnAuthorizedError

        self._track("rm_file")

    def rm(self, *_args: int, **_kwargs: int) -> None:
        """Record an ``rm`` call; all arguments are ignored."""
        self._track("rm")

    def _open(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``_open`` call; all arguments are ignored."""
        self._track("_open")

    def open(self, *_args: int, **_kwargs: int) -> None:  # noqa: A003
        """Record an ``open`` call; all arguments are ignored."""
        self._track("open")

    def touch(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``touch`` call; all arguments are ignored."""
        self._track("touch")

    def ukey(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``ukey`` call; all arguments are ignored."""
        self._track("ukey")

    def created(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``created`` call; all arguments are ignored."""
        self._track("created")

    def modified(self, *_args: int, **_kwargs: int) -> None:
        """Record a ``modified`` call; all arguments are ignored."""
        self._track("modified")
+
+
def mock_final_fs(force: bool = False) -> MockFinalFileSystem:
    """Return the singleton mock file system, recording how it was obtained.

    Args:
        force (bool): when True, record a forced retrieval
            (``force_get_final_fs``); otherwise record ``_get_final_fs``.

    Returns:
        MockFinalFileSystem: the shared mocked file system instance.
    """
    mock_fs = MockFinalFileSystem()
    # Plain if/else: the original abused a conditional *expression* purely
    # for its side effect, which linters flag and readers stumble over.
    if force:
        mock_fs._track("force_get_final_fs")
    else:
        mock_fs._track("_get_final_fs")
    return mock_fs
diff --git a/tests/unit/seismic_dms_client_mocks.py b/tests/unit/seismic_dms_client_mocks.py
new file mode 100644
index 0000000..ed7c87d
--- /dev/null
+++ b/tests/unit/seismic_dms_client_mocks.py
@@ -0,0 +1,123 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Test MockResponse for SDFS package."""
+from typing import Any
+
+MOCK_REFRESH_TOKEN_RESPONSE_DATA = {
+    "access_token": "MOCK_ACCESS_TOKEN",
+    "expires_in": 3599,
+    "scope": "https://www.googleapis.com/auth/userinfo.email openid",
+    "token_type": "Bearer",
+    "id_token": "MOCK_ID_TOKEN",
+}
+
+MOCK_DATASET_INFO_RESPONSE_DATA = {
+    "sbit_count": 0,
+    "last_modified_date": "Wed Nov 01 2023 14:13:29 GMT+0000 (Coordinated Universal Time)",  # noqa: E501
+    "created_by": "114828157792038308987",
+    "sbit": None,
+    "subproject": "osdu-mdio",
+    "path": "/autotest_path/",
+    "gcsurl": "or2-msq-tgs-mdio-t1iylu-osdu-ss-seismic/9ab7af3561731cda9/b00dd5346271",
+    "readonly": False,
+    "filemetadata": {
+        "md5Checksum": None,
+        "nobjects": 1,
+        "size": 895367300,
+        "type": "GENERIC",
+        "tier_class": None,
+    },
+    "name": "unit",
+    "ctag": "kLAFwFIF9TuWU5R9osdu;osdu",
+    "created_date": "Wed Nov 01 2023 14:10:57 GMT+0000 (Coordinated Universal Time)",
+    "ltag": "osdu-default-data-tag",
+    "tenant": "osdu",
+    "access_policy": "uniform",
+}
+
+MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA = {
+    "access_token": "MOCK_ACCESS_DOWNSCOPED_TOKEN",
+    "expires_in": 3599000,
+    "token_type": "Bearer",
+}
+
+
class MockResponse:
    """Minimal stand-in for ``requests.Response`` used by the mocked OSDU API."""

    def __init__(self, json_data: dict, status_code: int) -> None:
        """Store the canned payload and HTTP status code.

        Args:
            json_data (dict): payload returned by :meth:`json`.
            status_code (int): HTTP status code of the fake response.
        """
        self.json_data = json_data
        self.status_code = status_code

    def json(self) -> dict:
        """Return the canned JSON payload.

        Returns:
            dict: the payload supplied at construction time.
        """
        return self.json_data

    @property
    def ok(self) -> bool:
        """Whether the response is successful.

        Mirrors ``requests.Response.ok`` (True for status codes below 400).
        The original always returned True, so even the router's 404
        fallback looked successful and error paths went untested.

        Returns:
            bool: True when ``status_code`` is below 400.
        """
        return self.status_code < 400

    @property
    def headers(self) -> dict:
        """Fake response headers.

        Returns:
            dict: headers advertising the mocked Google Cloud provider.
        """
        return {"service-provider": "gc"}
+
+
def mock_osdu_request(*_args: tuple, **kwargs: Any) -> MockResponse:
    """Route a mocked HTTP request to the matching canned OSDU response.

    Args:
        *_args: positional ``requests.request`` arguments (ignored).
        **kwargs: keyword ``requests.request`` arguments; ``kwargs["url"]``
            selects the canned response.

    Returns:
        MockResponse: token, downscoped-credentials, or dataset-info payload
        depending on the url; a 404 response for any unrecognized url.
    """
    url = kwargs["url"]

    if "/token" in url:
        return MockResponse(MOCK_REFRESH_TOKEN_RESPONSE_DATA, 200)

    if "/api/seismic-store/v3" in url:
        if "utility" in url:
            return MockResponse(MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA, 200)

        if "dataset" in url:
            return MockResponse(MOCK_DATASET_INFO_RESPONSE_DATA, 200)

    return MockResponse(None, 404)
diff --git a/tests/unit/singleton.py b/tests/unit/singleton.py
new file mode 100644
index 0000000..5a94c02
--- /dev/null
+++ b/tests/unit/singleton.py
@@ -0,0 +1,38 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""Singleton."""
+
+
class Singleton(type):
    """Metaclass caching one instance per class, for effective mocking.

    Repeated construction of a class using this metaclass returns the same
    (stateful) object, so mocks accumulate state across calls in a test.
    """

    # Maps each class object to its single cached instance.
    _instances = {}

    def __call__(cls, *args: object, **kwargs: object) -> object:
        """Return the cached instance of ``cls``, creating it on first call.

        Fix: the original docstring/annotations claimed this returns the
        "instances collection" (``dict``) and typed ``*args`` as ``int``;
        it actually returns the singleton instance built from arbitrary args.

        Args:
            *args: positional arguments forwarded to the first construction.
            **kwargs: keyword arguments forwarded to the first construction.

        Returns:
            object: the singleton instance of ``cls``.
        """
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]
diff --git a/tests/unit/test_sd_file_system.py b/tests/unit/test_sd_file_system.py
new file mode 100644
index 0000000..19fb686
--- /dev/null
+++ b/tests/unit/test_sd_file_system.py
@@ -0,0 +1,337 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestSDFileSystem."""
+from typing import Callable
+from unittest import mock
+
+from sdfs import SDFileSystem
+
+from .sd_file_system_mocks import mock_final_fs
+from .seismic_dms_client_mocks import mock_osdu_request
+
# Test fixtures: a seismic-store dataset path, a dummy Seismic DMS endpoint
# and token. No real network I/O happens — requests are intercepted by
# mock_osdu_request and the wrapped fs by mock_final_fs.
MOCK_SD_PATH = "sd://osdu/osdu-mdio/autotest_path/unit"
MOCK_SEISMIC_STORE_URL = "https://mdio.test/api/seismic-store/v3"
MOCK_ACCESS_TOKEN = "MOCK_ACCESS_TOKEN"  # noqa: S105
+
+
class TestSDFileSystem:
    """TestSDFileSystem.

    Verifies that every public SDFileSystem method delegates to the wrapped
    "final" filesystem exactly once per call, and that aliases map onto their
    canonical methods (e.g. ``listdir`` -> ``ls``).
    """

    @mock.patch("sdfs.SDFileSystem._get_final_fs", side_effect=mock_final_fs)
    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_sd_fs_interface(self, _req_mock: Callable, _fs_mock: Callable) -> None:  # noqa: PLR0915
        """Testing SD file system interface.

        Args:
            _req_mock (Callable): _req_mock magic mock
            _fs_mock (Callable): _fs_mock magic mock
        """
        # The mock returned by _get_final_fs presumably records per-method
        # call counts in its ``_result`` dict — TODO confirm against
        # sd_file_system_mocks.mock_final_fs.
        sd_file_system = SDFileSystem(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        mock_sd_path = MOCK_SD_PATH + "/test"

        assert sd_file_system

        assert sd_file_system.transaction
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["transaction"] == 1

        sd_file_system.start_transaction()
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["start_transaction"] == 1

        sd_file_system.end_transaction()
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["end_transaction"] == 1

        sd_file_system.invalidate_cache()
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["invalidate_cache"] == 1

        # Counters are kept in local variables for methods that are hit again
        # later via their aliases (makedir, listdir, cp, ...).
        mkdir_counter = 1
        sd_file_system.mkdir(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["mkdir"] == mkdir_counter

        makedirs_counter = 1
        sd_file_system.makedirs(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["makedirs"] == makedirs_counter

        sd_file_system.rmdir(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["rmdir"] == 1

        ls_counter = 1
        sd_file_system.ls(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["ls"] == ls_counter

        sd_file_system._ls_from_cache(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["_ls_from_cache"] == 1

        sd_file_system.find(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["find"] == 1

        du_counter = 1
        sd_file_system.du(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["du"] == du_counter

        sd_file_system.glob(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["glob"] == 1

        sd_file_system.expand_path(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["expand_path"] == 1

        sd_file_system.exists(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["exists"] == 1

        sd_file_system.lexists(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["lexists"] == 1

        info_counter = 1
        sd_file_system.info(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["info"] == info_counter

        sd_file_system.checksum(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["checksum"] == 1

        sd_file_system.size(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["size"] == 1

        sd_file_system.sizes([mock_sd_path])
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["sizes"] == 1

        sd_file_system.isdir(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["isdir"] == 1

        sd_file_system.isfile(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["isfile"] == 1

        sd_file_system.write_text(mock_sd_path, "value")
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["write_text"] == 1

        sd_file_system.read_text(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["read_text"] == 1

        sd_file_system.read_block(mock_sd_path, 0, 5)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["read_block"] == 1

        pipe_file_counter = 1
        sd_file_system.pipe_file(mock_sd_path, "value")
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["pipe_file"] == pipe_file_counter

        sd_file_system.pipe(mock_sd_path, "value")
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["pipe"] == 1

        cat_file_counter = 1
        sd_file_system.cat_file(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["cat_file"] == cat_file_counter

        sd_file_system.cat_ranges([mock_sd_path], [0], [5])
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["cat_ranges"] == 1

        sd_file_system.cat(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["cat"] == 1

        sd_file_system.get_file(mock_sd_path, "test")
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["get_file"] == 1

        get_counter = 1
        sd_file_system.get(mock_sd_path, "local_path")
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["get"] == get_counter

        sd_file_system.put_file("test", mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["put_file"] == 1

        put_counter = 1
        sd_file_system.put("test", mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["put"] == put_counter

        sd_file_system.head(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["head"] == 1

        sd_file_system.tail(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["tail"] == 1

        sd_file_system.cp_file(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["cp_file"] == 1

        copy_counter = 1
        sd_file_system.copy(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["copy"] == copy_counter

        mv_counter = 1
        sd_file_system.mv(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["mv"] == mv_counter

        # rm_file is expected to force a fresh final fs; nothing before it
        # should have triggered that path.
        assert "force_get_final_fs" not in final_fs_mock._result

        rm_file_counter = 1
        sd_file_system.rm_file(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["rm_file"] == rm_file_counter

        assert final_fs_mock._result["force_get_final_fs"] == 1

        sd_file_system._rm(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        rm_file_counter += 1
        assert final_fs_mock._result["rm_file"] == rm_file_counter

        rm_counter = 1
        sd_file_system.rm(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["rm"] == rm_counter

        sd_file_system._open(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["_open"] == 1

        sd_file_system.open(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["open"] == 1

        sd_file_system.touch(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["touch"] == 1

        sd_file_system.ukey(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["ukey"] == 1

        sd_file_system.created(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["created"] == 1

        sd_file_system.modified(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        assert final_fs_mock._result["modified"] == 1

        # ------------------------------------------
        # Aliases
        # Each alias must increment its canonical method's counter,
        # proving it is a pure delegation rather than a reimplementation.

        sd_file_system.read_bytes(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        cat_file_counter += 1
        assert final_fs_mock._result["cat_file"] == cat_file_counter

        sd_file_system.write_bytes(mock_sd_path, "value")
        final_fs_mock = sd_file_system._get_final_fs()
        pipe_file_counter += 1
        assert final_fs_mock._result["pipe_file"] == pipe_file_counter

        sd_file_system.makedir(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        mkdir_counter += 1
        assert final_fs_mock._result["mkdir"] == mkdir_counter

        sd_file_system.mkdirs(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        makedirs_counter += 1
        assert final_fs_mock._result["makedirs"] == makedirs_counter

        sd_file_system.listdir(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        ls_counter += 1
        assert final_fs_mock._result["ls"] == ls_counter

        sd_file_system.cp(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        copy_counter += 1
        assert final_fs_mock._result["copy"] == copy_counter

        sd_file_system.move(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        mv_counter += 1
        assert final_fs_mock._result["mv"] == mv_counter

        sd_file_system.stat(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        info_counter += 1
        assert final_fs_mock._result["info"] == info_counter

        sd_file_system.disk_usage(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        du_counter += 1
        assert final_fs_mock._result["du"] == du_counter

        sd_file_system.rename(mock_sd_path, mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        mv_counter += 1
        assert final_fs_mock._result["mv"] == mv_counter

        sd_file_system.delete(mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        rm_counter += 1
        assert final_fs_mock._result["rm"] == rm_counter

        sd_file_system.upload("test", mock_sd_path)
        final_fs_mock = sd_file_system._get_final_fs()
        put_counter += 1
        assert final_fs_mock._result["put"] == put_counter

        sd_file_system.download(mock_sd_path, "local_path")
        final_fs_mock = sd_file_system._get_final_fs()
        get_counter += 1
        assert final_fs_mock._result["get"] == get_counter

        # dataset_audit and update_dataset_metadata are expected to size the
        # dataset internally via ``du`` — hence the counter bumps below.
        result = sd_file_system.dataset_audit()
        final_fs_mock = sd_file_system._get_final_fs()
        du_counter += 1
        assert final_fs_mock._result["du"] == du_counter
        assert result["size"] == final_fs_mock._default_file_size

        result = sd_file_system.update_dataset_metadata()
        final_fs_mock = sd_file_system._get_final_fs()
        du_counter += 1
        assert final_fs_mock._result["du"] == du_counter
diff --git a/tests/unit/test_seismic_dms_client.py b/tests/unit/test_seismic_dms_client.py
new file mode 100644
index 0000000..7ebc930
--- /dev/null
+++ b/tests/unit/test_seismic_dms_client.py
@@ -0,0 +1,228 @@
+"""
+* Copyright 2023-2024, TGS
+* Copyright 2023-2024, EPAM
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+"""
+
+"""TestSeismicDmsClient."""
+from typing import Callable
+from unittest import mock
+
+import pytest
+from sdfs.clients.seismic_dms_client import SeismicDmsClient
+from sdfs.exceptions import AuthStrategyError, DatasetBoundariesError
+
+from .seismic_dms_client_mocks import (
+    MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA,
+    MOCK_DATASET_INFO_RESPONSE_DATA,
+    MOCK_REFRESH_TOKEN_RESPONSE_DATA,
+    mock_osdu_request,
+)
+
# Test fixtures for both auth strategies (access_token vs refresh_token);
# all HTTP traffic is served by mock_osdu_request, so the URLs never resolve.
MOCK_AUTH_REFRESH_TOKEN_URL = "https://oauth2.googleapis.com/token"  # noqa: S105
MOCK_ACCESS_TOKEN = "MOCK_ACCESS_TOKEN"  # noqa: S105
MOCK_REFRESH_TOKEN = "MOCK_REFRESH_TOKEN"  # noqa: S105
MOCK_CLIENT_ID = "MOCK_CLIENT_ID"
MOCK_CLIENT_SECRET = "MOCK_CLIENT_SECRET"  # noqa: S105
MOCK_SD_PATH = "sd://osdu/osdu-mdio/autotest_path/unit"  # noqa: B950
MOCK_SEISMIC_STORE_URL = "https://mdio.test/api/seismic-store/v3"
+
+
class TestSeismicDmsClient:
    """TestSeismicDmsClient.

    Exercises SeismicDmsClient against mock_osdu_request-served endpoints:
    auth strategy selection, token refresh, provider detection, dataset
    metadata, downscoped credentials and storage URL resolution.
    """

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_access_token_auth_strategy(self, _: Callable) -> None:
        """Test case for access_token auth strategy.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        # Supplying only access_token must select the "access_token" strategy.
        assert seismic_dms_client._auth_strategy == "access_token"
        assert seismic_dms_client._access_token == MOCK_ACCESS_TOKEN

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_refresh_token_auth_strategy(self, _: Callable) -> None:
        """Test case for refresh_token auth strategy.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            refresh_url=MOCK_AUTH_REFRESH_TOKEN_URL,
            refresh_token=MOCK_REFRESH_TOKEN,
            client_id=MOCK_CLIENT_ID,
            client_secret=MOCK_CLIENT_SECRET,
        )

        # The client is expected to exchange the refresh token immediately,
        # so an access token from the mocked /token endpoint is present.
        assert seismic_dms_client._access_token
        assert seismic_dms_client._auth_strategy == "refresh_token"
        assert (
            seismic_dms_client._access_token
            == MOCK_REFRESH_TOKEN_RESPONSE_DATA["access_token"]
        )

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_refresh_token(self, _: Callable) -> None:
        """Test case for refresh token method (osdu request).

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client_access_token = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        # Refreshing is only valid for the refresh_token strategy.
        with pytest.raises(
            AuthStrategyError, match="This auth strategy don't support this method"
        ):
            seismic_dms_client_access_token.refresh_token()

        seismic_dms_client_refresh_token = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            refresh_url=MOCK_AUTH_REFRESH_TOKEN_URL,
            refresh_token=MOCK_REFRESH_TOKEN,
            client_id=MOCK_CLIENT_ID,
            client_secret=MOCK_CLIENT_SECRET,
        )

        token = seismic_dms_client_refresh_token.refresh_token()
        assert token == MOCK_ACCESS_TOKEN

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_get_service_provider_code(self, _: Callable) -> None:
        """Test case for testing cloud provider code extraction.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        # The mock always reports "gc" via the service-provider header;
        # the assertion only checks membership in the supported set.
        cloud_code = seismic_dms_client.get_service_provider_code()
        assert cloud_code in ["gc", "aws", "azure", "ibm", "anthos"]

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_get_dataset_properties(self, _: Callable) -> None:
        """Test case for testing dataset information extraction from Seismic DMS.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        dataset_info = seismic_dms_client.get_dataset_properties(True)
        assert dataset_info["tenant"] == seismic_dms_client._sd_tenant
        assert dataset_info["subproject"] == seismic_dms_client._sd_subproject
        assert seismic_dms_client._sd_subproject_path in dataset_info["path"]
        assert dataset_info["name"] == seismic_dms_client._sd_dataset_name

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_get_downscoped_credentials(self, _: Callable) -> None:
        """Test case for testing dataset downscoped cred extraction for data management.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        # Downscoped credentials must differ from the user access token.
        downscoped_credentials = seismic_dms_client.get_downscoped_credentials(True)
        assert downscoped_credentials["access_token"] != MOCK_ACCESS_TOKEN
        assert (
            downscoped_credentials["access_token"]
            == MOCK_DATASET_DOWNSCOPED_CREDENTIALS_RESPONSE_DATA["access_token"]
        )

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_dataset_storage_root(self, _: Callable) -> None:
        """Test case for getting the cloud native dataset URL.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        dataset_storage_root = seismic_dms_client.dataset_storage_root
        assert MOCK_DATASET_INFO_RESPONSE_DATA["gcsurl"] in dataset_storage_root

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_get_dataset_storage_url(self, _: Callable) -> None:
        """Test case for getting storage url.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        # Paths outside the dataset's sdpath must be rejected.
        with pytest.raises(
            DatasetBoundariesError,
            match="The sdpath is not in the dataset's boundaries",
        ):
            seismic_dms_client.get_dataset_storage_url("test_url")

        dataset_storage_url = seismic_dms_client.get_dataset_storage_url(
            f"{MOCK_SD_PATH}/test.txt"
        )

        assert dataset_storage_url
        assert "test.txt" in dataset_storage_url
        assert MOCK_DATASET_INFO_RESPONSE_DATA["gcsurl"] in dataset_storage_url

    @mock.patch("requests.request", side_effect=mock_osdu_request)
    def test_get_storage_options(self, _: Callable) -> None:
        """Test case for getting cloud native storage options.

        Args:
            _ (Callable): mock_osdu_request
        """
        seismic_dms_client = SeismicDmsClient(
            sdpath=MOCK_SD_PATH,
            seismic_dms_url=MOCK_SEISMIC_STORE_URL,
            access_token=MOCK_ACCESS_TOKEN,
        )

        storage_options = seismic_dms_client.get_storage_options()

        assert storage_options
        assert isinstance(storage_options, dict)
diff --git a/version.py b/version.py
new file mode 100644
index 0000000..19b31a1
--- /dev/null
+++ b/version.py
@@ -0,0 +1,57 @@
+"""Version generation script."""
+import os
+from pathlib import Path
+
+
def get_version_from_file() -> str:
    """Read the base package version from the ``VERSION`` file.

    Returns:
        str: version string with surrounding whitespace stripped
    """
    return Path("VERSION").read_text().strip()
+
+
def prepare_version() -> str:
    """Prepare the package version from git constants and GitLab CI env variables.

    Returns:
        str: result version

    Three cases:
      * ``BUILD_TAG`` starts with ``v`` -> release version (tag minus ``v``).
      * CI metadata present -> local version identifier, see
        https://packaging.python.org/guides/distributing-packages-using-setuptools/#local-version-identifiers
      * otherwise -> local build, base version from the VERSION file.
    """
    version = os.getenv("BUILD_TAG", "")

    if version.startswith("v"):
        # Release tag version, e.g. v0.9.0 -> 0.9.0.
        return version[1:]

    try:
        commit = os.environ["BUILD_COMMIT_SHORT_SHA"]
        build_id = os.environ["BUILD_ID"]
        branch_name = os.environ["CI_COMMIT_BRANCH"]
        default_branch_name = os.environ["CI_DEFAULT_BRANCH"]

        # Default-branch builds are release candidates; everything else
        # (merge requests included) is a dev build.  The original branched
        # on CI_PIPELINE_SOURCE too, but both arms produced ".dev".
        release_identifier = "rc" if branch_name == default_branch_name else ".dev"

        return f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
    except KeyError:
        # Local build: CI variables are absent.  Bug fix — the original
        # referenced ``event`` here (only defined inside the try, so this
        # raised NameError) and re-read BUILD_COMMIT_SHORT_SHA with
        # os.environ[...], which re-raised when that was the missing var.
        commit = os.getenv("BUILD_COMMIT_SHORT_SHA", "local")
        return f"{get_version_from_file()}+{commit}"
+
+
+print(prepare_version())
-- 
GitLab


From 033677e0b68985378db51d529e3903baf03e0f65 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Tue, 24 Dec 2024 10:56:37 +0200
Subject: [PATCH 002/111] Remove Project name

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 667f17b..9a415aa 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,7 @@ Unit tests are located in the _tests_/_unit_ directory.
 ```
 <!-- Seismic related vars -->
 export SD_PATH=... (default value: sd://osdu/osdu-mdio/autotest_path/integration)
-export SEISMIC_STORE_URL=... (default value: https://mdio.endpoints.or2-msq-tgs-mdio-t1iylu.cloud.goog/api/seismic-store/v3)
+export SEISMIC_STORE_URL=... (default value: https://<OSDU_INGRES_DOMAIN>/api/seismic-store/v3)
 
 <!-- For access token use case -->
 export ACCESS_TOKEN=...
-- 
GitLab


From 6a3edab0318fa3a5e8fb05dfd5d7012f54ad82ff Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 14:57:14 +0200
Subject: [PATCH 003/111] Add pipeline

---
 .flake8        |  8 ++++++++
 .gitlab-ci.yml | 16 ++++++++++++++++
 2 files changed, 24 insertions(+)
 create mode 100644 .flake8
 create mode 100644 .gitlab-ci.yml

diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..89c2b9a
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,8 @@
+[flake8]
+select = B,B9,C,D,DAR,E,F,N,RST,S,W
+ignore = E203,E501,RST201,RST203,RST301,W503
+max-line-length = 88
+max-complexity = 10
+docstring-convention = google
+rst-roles = class,const,func,meth,mod,ref
+rst-directives = deprecated
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..e8e54e0
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,16 @@
+stages:
+  - lint
+
+
+linter:
+  stage: lint
+  image: python:3.11
+  before_script:
+    - pip install flake8
+  script:
+    - flake8 .
+  artifacts:
+    when: always
+    paths:
+      - flake8_report.txt
+  allow_failure: false #true
-- 
GitLab


From bbffe0a7b95f6acf5d1f3bbaec8aee929f3595cf Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 15:30:42 +0200
Subject: [PATCH 004/111] Test lint step

---
 .flake8        | 4 +++-
 .gitlab-ci.yml | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.flake8 b/.flake8
index 89c2b9a..a0a1b1c 100644
--- a/.flake8
+++ b/.flake8
@@ -5,4 +5,6 @@ max-line-length = 88
 max-complexity = 10
 docstring-convention = google
 rst-roles = class,const,func,meth,mod,ref
-rst-directives = deprecated
\ No newline at end of file
+rst-directives = deprecated
+
+exclude = tests/*
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e8e54e0..d09726d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -13,4 +13,4 @@ linter:
     when: always
     paths:
       - flake8_report.txt
-  allow_failure: false #true
+  allow_failure: true #allow job failure gitlab
-- 
GitLab


From 3119d8cfb0518b71ad301c9957fd01afcc53b754 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 15:34:48 +0200
Subject: [PATCH 005/111] Test lint step

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d09726d..e5f90fa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,7 +8,7 @@ linter:
   before_script:
     - pip install flake8
   script:
-    - flake8 .
+    - flake8 . --tee --output-file=flake8_report.txt
   artifacts:
     when: always
     paths:
-- 
GitLab


From a69270a4e94cfd919f5438eb3f3924ede64887c5 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 15:45:16 +0200
Subject: [PATCH 006/111] Test lint step

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e5f90fa..ecdc83e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -13,4 +13,4 @@ linter:
     when: always
     paths:
       - flake8_report.txt
-  allow_failure: true #allow job failure gitlab
+  #allow_failure: true #allow job failure gitlab
-- 
GitLab


From 3ebd89c78f2d891fc4dc5307e4118573ebf9db2e Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 15:57:55 +0200
Subject: [PATCH 007/111] Test lint step

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ecdc83e..0c9680c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,9 +8,9 @@ linter:
   before_script:
     - pip install flake8
   script:
-    - flake8 . --tee --output-file=flake8_report.txt
+    - flake8 . --color --tee --output-file=flake8_report.txt
   artifacts:
     when: always
     paths:
       - flake8_report.txt
-  #allow_failure: true #allow job failure gitlab
+  allow_failure: true #allow job failure gitlab
-- 
GitLab


From 6494e0f598560f40e2ab54b3d686aef17f839acf Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:00:26 +0200
Subject: [PATCH 008/111] Test lint step

---
 .flake8        | 1 +
 .gitlab-ci.yml | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.flake8 b/.flake8
index a0a1b1c..9f8a364 100644
--- a/.flake8
+++ b/.flake8
@@ -6,5 +6,6 @@ max-complexity = 10
 docstring-convention = google
 rst-roles = class,const,func,meth,mod,ref
 rst-directives = deprecated
+color = always
 
 exclude = tests/*
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0c9680c..e5f90fa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,7 +8,7 @@ linter:
   before_script:
     - pip install flake8
   script:
-    - flake8 . --color --tee --output-file=flake8_report.txt
+    - flake8 . --tee --output-file=flake8_report.txt
   artifacts:
     when: always
     paths:
-- 
GitLab


From 185feb9a307ed775a326428bdb9ef3326e0de2aa Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:02:28 +0200
Subject: [PATCH 009/111] Test lint step

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e5f90fa..62e604f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -6,7 +6,7 @@ linter:
   stage: lint
   image: python:3.11
   before_script:
-    - pip install flake8
+    - pip install flake8, flake8-colors
   script:
     - flake8 . --tee --output-file=flake8_report.txt
   artifacts:
-- 
GitLab


From 3bc0903e965368249d86cb4bb10243511bb096ef Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:04:59 +0200
Subject: [PATCH 010/111] Test lint step

---
 .gitlab-ci.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 62e604f..7390512 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -6,7 +6,8 @@ linter:
   stage: lint
   image: python:3.11
   before_script:
-    - pip install flake8, flake8-colors
+    - pip install flake8
+    - pip install flake8-colors
   script:
     - flake8 . --tee --output-file=flake8_report.txt
   artifacts:
-- 
GitLab


From 9c27ff5be555014d14fb8ae5c0e3ade5fa7c038c Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:08:46 +0200
Subject: [PATCH 011/111] Test lint step scan

---
 .gitlab-ci.yml | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7390512..88795bb 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,6 @@
 stages:
   - lint
+  - scan
 
 
 linter:
@@ -7,7 +8,6 @@ linter:
   image: python:3.11
   before_script:
     - pip install flake8
-    - pip install flake8-colors
   script:
     - flake8 . --tee --output-file=flake8_report.txt
   artifacts:
@@ -15,3 +15,10 @@ linter:
     paths:
       - flake8_report.txt
   allow_failure: true #allow job failure gitlab
+
+ruff:
+  stage: scan
+  image: python:3.11
+  script:
+    - poetry install
+    - poetry run ruff check .
-- 
GitLab


From 532de98974cc07e088ad1f718c9cf0a83a68a035 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:13:14 +0200
Subject: [PATCH 012/111] Test lint step scan

---
 .gitlab-ci.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 88795bb..87e4cf3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -19,6 +19,10 @@ linter:
 ruff:
   stage: scan
   image: python:3.11
-  script:
+  before_script:
+    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
+    - apt-get update
+    - apt-get install -y jq curl
     - poetry install
+  script:
     - poetry run ruff check .
-- 
GitLab


From a3ac52292f82d5383ac7bc25614838898a6041dd Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:13:46 +0200
Subject: [PATCH 013/111] Test lint step scan

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 87e4cf3..15f9b9c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,9 +20,9 @@ ruff:
   stage: scan
   image: python:3.11
   before_script:
-    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
     - apt-get update
     - apt-get install -y jq curl
+    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
     - poetry install
   script:
     - poetry run ruff check .
-- 
GitLab


From 5974b87a130ad44742cee0d0a3e817198e5fffd4 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:18:35 +0200
Subject: [PATCH 014/111] Unit-tests

---
 .gitlab-ci.yml | 33 +++++++++++++++++++++++----------
 1 file changed, 23 insertions(+), 10 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 15f9b9c..6c457d3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,23 +1,36 @@
 stages:
   - lint
-  - scan
+  - unit_tests
 
 
+#linter:
+#  stage: lint
+#  image: python:3.11
+#  before_script:
+#    - pip install flake8
+#  script:
+#    - flake8 . --tee --output-file=flake8_report.txt
+#  artifacts:
+#    when: always
+#    paths:
+#      - flake8_report.txt
+#  allow_failure: true #allow job failure gitlab
+
 linter:
   stage: lint
   image: python:3.11
   before_script:
-    - pip install flake8
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
+    - poetry install
   script:
-    - flake8 . --tee --output-file=flake8_report.txt
-  artifacts:
-    when: always
-    paths:
-      - flake8_report.txt
+    - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
 
-ruff:
-  stage: scan
+
+unit_tests:
+  stage: unit_tests
   image: python:3.11
   before_script:
     - apt-get update
@@ -25,4 +38,4 @@ ruff:
     - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
     - poetry install
   script:
-    - poetry run ruff check .
+    - nox --session=unit-tests-3.11
-- 
GitLab


From 4eccdc3e691d8b75d5ce2970d495cdd0a62db720 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:27:45 +0200
Subject: [PATCH 015/111] Unit-tests

---
 .gitlab-ci.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6c457d3..e3691fb 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,6 +2,14 @@ stages:
   - lint
   - unit_tests
 
+.install-deps:
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install poetry
+    - pip install nox nox-poetry
+    - poetry --version
 
 #linter:
 #  stage: lint
@@ -19,11 +27,8 @@ stages:
 linter:
   stage: lint
   image: python:3.11
+  extends: .install_deps
   before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
-    - poetry install
   script:
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
@@ -32,10 +37,5 @@ linter:
 unit_tests:
   stage: unit_tests
   image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip & pip install nox & pip install nox-poetry & pip install poetry
-    - poetry install
   script:
     - nox --session=unit-tests-3.11
-- 
GitLab


From 5ff69ad3356b086d49883b8eeaa39019175ca076 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:28:09 +0200
Subject: [PATCH 016/111] Unit-tests

---
 .gitlab-ci.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e3691fb..6b85fbe 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -28,7 +28,6 @@ linter:
   stage: lint
   image: python:3.11
   extends: .install_deps
-  before_script:
   script:
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
-- 
GitLab


From 8cf1c213d68d8c829b008894475c00e1387f6d9a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:30:21 +0200
Subject: [PATCH 017/111] Unit-tests

---
 .gitlab-ci.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6b85fbe..a2a8abb 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,7 +27,7 @@ stages:
 linter:
   stage: lint
   image: python:3.11
-  extends: .install_deps
+  extends: .install-deps
   script:
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
@@ -36,5 +36,6 @@ linter:
 unit_tests:
   stage: unit_tests
   image: python:3.11
+  extends: .install_deps
   script:
     - nox --session=unit-tests-3.11
-- 
GitLab


From 07bcf61acb8ebfb8eadc00d390e95ebf60ec9dbc Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:34:15 +0200
Subject: [PATCH 018/111] Unit-tests

---
 .gitlab-ci.yml | 23 +++++++++--------------
 1 file changed, 9 insertions(+), 14 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a2a8abb..932dc35 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -11,23 +11,18 @@ stages:
     - pip install nox nox-poetry
     - poetry --version
 
-#linter:
-#  stage: lint
-#  image: python:3.11
-#  before_script:
-#    - pip install flake8
-#  script:
-#    - flake8 . --tee --output-file=flake8_report.txt
-#  artifacts:
-#    when: always
-#    paths:
-#      - flake8_report.txt
-#  allow_failure: true #allow job failure gitlab
 
 linter:
   stage: lint
   image: python:3.11
-  extends: .install-deps
+  #extends: .install-deps
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install poetry
+    - pip install nox nox-poetry
+    - poetry --version
   script:
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
@@ -36,6 +31,6 @@ linter:
 unit_tests:
   stage: unit_tests
   image: python:3.11
-  extends: .install_deps
+  #extends: .install_deps
   script:
     - nox --session=unit-tests-3.11
-- 
GitLab


From 1082061438b45f8c55b2330058c3272dab91f248 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:37:01 +0200
Subject: [PATCH 019/111] Unit-tests

---
 .gitlab-ci.yml | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 932dc35..1ac514f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,10 +20,10 @@ linter:
     - apt-get update
     - apt-get install -y jq curl
     - pip install --upgrade pip
-    - pip install poetry
     - pip install nox nox-poetry
     - poetry --version
   script:
+    - pip install poetry
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
 
@@ -31,6 +31,12 @@ linter:
 unit_tests:
   stage: unit_tests
   image: python:3.11
-  #extends: .install_deps
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox nox-poetry
+    - poetry --version
   script:
+    - pip install poetry
     - nox --session=unit-tests-3.11
-- 
GitLab


From e82847ae543817a81b7eee42a190488bb78de443 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:38:04 +0200
Subject: [PATCH 020/111] Unit-tests

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1ac514f..4c0b2ff 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -21,9 +21,9 @@ linter:
     - apt-get install -y jq curl
     - pip install --upgrade pip
     - pip install nox nox-poetry
-    - poetry --version
   script:
     - pip install poetry
+    - poetry --version
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
 
@@ -36,7 +36,7 @@ unit_tests:
     - apt-get install -y jq curl
     - pip install --upgrade pip
     - pip install nox nox-poetry
-    - poetry --version
   script:
     - pip install poetry
+    - poetry --version
     - nox --session=unit-tests-3.11
-- 
GitLab


From b2d12b3c3af05ad9fdaef6b06bd42479df55d033 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:50:18 +0200
Subject: [PATCH 021/111] Unit-tests

---
 .gitlab-ci.yml | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4c0b2ff..ae2743e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,10 +20,12 @@ linter:
     - apt-get update
     - apt-get install -y jq curl
     - pip install --upgrade pip
-    - pip install nox nox-poetry
-  script:
+    - pip install nox
+    - pip install nox-poetry
     - pip install poetry
     - poetry --version
+  script:
+    - pip install poetry
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
 
@@ -35,8 +37,10 @@ unit_tests:
     - apt-get update
     - apt-get install -y jq curl
     - pip install --upgrade pip
-    - pip install nox nox-poetry
-  script:
+    - pip install nox
+    - pip install nox-poetry
     - pip install poetry
     - poetry --version
+  script:
+    - pip install poetry
     - nox --session=unit-tests-3.11
-- 
GitLab


From 19ab314a60472917d471ae0a6286a0efdf90f149 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:55:57 +0200
Subject: [PATCH 022/111] Unit-tests

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ae2743e..7563396 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -25,7 +25,7 @@ linter:
     - pip install poetry
     - poetry --version
   script:
-    - pip install poetry
+    - poetry install
     - poetry run ruff check .
   allow_failure: true #allow job failure gitlab
 
@@ -42,5 +42,5 @@ unit_tests:
     - pip install poetry
     - poetry --version
   script:
-    - pip install poetry
+    - poetry install
     - nox --session=unit-tests-3.11
-- 
GitLab


From c33cf7d54eca95e85b2858556af97886fb7804de Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 16:58:00 +0200
Subject: [PATCH 023/111] publish package

---
 .gitlab-ci.yml | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7563396..bb2c658 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,6 +1,7 @@
 stages:
   - lint
   - unit_tests
+  - build
 
 .install-deps:
   before_script:
@@ -44,3 +45,35 @@ unit_tests:
   script:
     - poetry install
     - nox --session=unit-tests-3.11
+
+
+publish-package:
+  stage: build
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
+#  rules:
+#    - if: '$CI_COMMIT_BRANCH'
+#    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+#    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
+#      when: never
+#    - if: $CI_COMMIT_TAG
\ No newline at end of file
-- 
GitLab


From c73e0f328de2c8acb8dd1e10b00566ce5474601f Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 17:47:31 +0200
Subject: [PATCH 024/111] delete .flake8

---
 .flake8 | 11 -----------
 1 file changed, 11 deletions(-)
 delete mode 100644 .flake8

diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 9f8a364..0000000
--- a/.flake8
+++ /dev/null
@@ -1,11 +0,0 @@
-[flake8]
-select = B,B9,C,D,DAR,E,F,N,RST,S,W
-ignore = E203,E501,RST201,RST203,RST301,W503
-max-line-length = 88
-max-complexity = 10
-docstring-convention = google
-rst-roles = class,const,func,meth,mod,ref
-rst-directives = deprecated
-color = always
-
-exclude = tests/*
\ No newline at end of file
-- 
GitLab


From 3912577a5a954ed7dcea21b05f374ef6ae32ef22 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 26 Dec 2024 23:53:30 +0200
Subject: [PATCH 025/111] test

---
 .gitlab-ci.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bb2c658..73f924b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,6 +3,14 @@ stages:
   - unit_tests
   - build
 
+
+variables:
+  BUILD_TAG: $CI_COMMIT_TAG
+  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
+  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
+  BUILD_ID: $CI_PIPELINE_IID
+
+
 .install-deps:
   before_script:
     - apt-get update
-- 
GitLab


From 96e342bbbbc2abb3beb34a2aa98e52177fd28060 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 00:11:14 +0200
Subject: [PATCH 026/111] test

---
 .gitlab-ci.yml | 2 +-
 version.py     | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 73f924b..b7e2047 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,7 +8,7 @@ variables:
   BUILD_TAG: $CI_COMMIT_TAG
   CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
   BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
-  BUILD_ID: $CI_PIPELINE_IID
+
 
 
 .install-deps:
diff --git a/version.py b/version.py
index 19b31a1..f644588 100644
--- a/version.py
+++ b/version.py
@@ -30,7 +30,7 @@ def prepare_version() -> str:
         # else, if there are no COMMIT_SHA and BUILD_ID, we assume it is a local build
         try:
             commit = os.environ["BUILD_COMMIT_SHORT_SHA"]
-            build_id = os.environ["BUILD_ID"]
+            #build_id = os.environ["BUILD_ID"]
             branch_name = os.environ["CI_COMMIT_BRANCH"]
             default_branch_name = os.environ["CI_DEFAULT_BRANCH"]
             merge_request = os.environ["CI_PIPELINE_SOURCE"]
@@ -44,7 +44,7 @@ def prepare_version() -> str:
                 release_identifier = ".dev"
 
             version = (
-                f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
+                f"{get_version_from_file()}{release_identifier}+{commit}" #f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
             )
 
         except KeyError:
-- 
GitLab


From 88d7429f7eeda253a5d21840ce605761846c7fe8 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 00:32:49 +0200
Subject: [PATCH 027/111] test

---
 version.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/version.py b/version.py
index f644588..fa6f974 100644
--- a/version.py
+++ b/version.py
@@ -39,12 +39,13 @@ def prepare_version() -> str:
             if branch_name == default_branch_name:
                 release_identifier = "rc"
             elif merge_request == event:
-                release_identifier = ".dev"
+                release_identifier = "dev"
             else:
-                release_identifier = ".dev"
+                release_identifier = "dev"
 
             version = (
-                f"{get_version_from_file()}{release_identifier}+{commit}" #f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
+                f"{get_version_from_file()}{release_identifier}+{commit}"
+                #f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
             )
 
         except KeyError:
-- 
GitLab


From ffdb5d509efda09add099ae0ed3db23534197032 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 01:06:48 +0200
Subject: [PATCH 028/111] test

---
 .gitlab-ci.yml                          |  8 ++++++-
 src/sdfs/__init__.py                    | 32 ++++++++++++-------------
 src/sdfs/clients/__init__.py            | 30 +++++++++++------------
 src/sdfs/clients/seismic_dms_client.py  | 32 ++++++++++++-------------
 src/sdfs/core.py                        | 31 ++++++++++++------------
 src/sdfs/exceptions.py                  | 32 ++++++++++++-------------
 src/sdfs/providers/__init__.py          | 31 ++++++++++++------------
 src/sdfs/providers/abstract_provider.py | 32 ++++++++++++-------------
 src/sdfs/providers/anthos.py            | 32 ++++++++++++-------------
 src/sdfs/providers/aws.py               | 32 ++++++++++++-------------
 src/sdfs/providers/azure.py             | 32 ++++++++++++-------------
 src/sdfs/providers/factory.py           | 32 ++++++++++++-------------
 src/sdfs/providers/google.py            | 32 ++++++++++++-------------
 src/sdfs/providers/ibm.py               | 32 ++++++++++++-------------
 version.py                              |  9 ++++---
 15 files changed, 213 insertions(+), 216 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b7e2047..618dc71 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,3 +1,9 @@
+include:
+  # fossa
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/fossa-python.yml"
+
+
 stages:
   - lint
   - unit_tests
@@ -8,7 +14,7 @@ variables:
   BUILD_TAG: $CI_COMMIT_TAG
   CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
   BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
-
+  BUILD_ID: $CI_PIPELINE_IID
 
 
 .install-deps:
diff --git a/src/sdfs/__init__.py b/src/sdfs/__init__.py
index 8cd3781..e88e705 100644
--- a/src/sdfs/__init__.py
+++ b/src/sdfs/__init__.py
@@ -1,22 +1,20 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
-"""SDFS library."""
 
+"""SDFS library."""
 
 from importlib import metadata
 
diff --git a/src/sdfs/clients/__init__.py b/src/sdfs/clients/__init__.py
index a6f7356..b855077 100644
--- a/src/sdfs/clients/__init__.py
+++ b/src/sdfs/clients/__init__.py
@@ -1,17 +1,15 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
diff --git a/src/sdfs/clients/seismic_dms_client.py b/src/sdfs/clients/seismic_dms_client.py
index 6a813f9..fe7878c 100644
--- a/src/sdfs/clients/seismic_dms_client.py
+++ b/src/sdfs/clients/seismic_dms_client.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """OSDU Seismic DMS client."""
+
 import json
 import logging
 from typing import Any, Callable, Optional
diff --git a/src/sdfs/core.py b/src/sdfs/core.py
index 3fb6e7e..a31c583 100644
--- a/src/sdfs/core.py
+++ b/src/sdfs/core.py
@@ -1,19 +1,18 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """OSDU Seismic Store pythonic interface."""
 
diff --git a/src/sdfs/exceptions.py b/src/sdfs/exceptions.py
index 31e9646..6a8e041 100644
--- a/src/sdfs/exceptions.py
+++ b/src/sdfs/exceptions.py
@@ -1,22 +1,20 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
-"""SDFS Custom Exceptions."""
 
+"""SDFS Custom Exceptions."""
 
 class ValidationError(Exception):
     """Raise when there is a validation error in the code."""
diff --git a/src/sdfs/providers/__init__.py b/src/sdfs/providers/__init__.py
index f1e233c..76b8f8c 100644
--- a/src/sdfs/providers/__init__.py
+++ b/src/sdfs/providers/__init__.py
@@ -1,19 +1,18 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """This module exposes all cloud providers supported by the system and it's factory."""
 
diff --git a/src/sdfs/providers/abstract_provider.py b/src/sdfs/providers/abstract_provider.py
index d733915..cf7e38a 100644
--- a/src/sdfs/providers/abstract_provider.py
+++ b/src/sdfs/providers/abstract_provider.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """ProviderSpecific."""
+
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Optional
 
diff --git a/src/sdfs/providers/anthos.py b/src/sdfs/providers/anthos.py
index 94f9f5f..6048553 100644
--- a/src/sdfs/providers/anthos.py
+++ b/src/sdfs/providers/anthos.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """AnthosSpecific."""
+
 from typing import Optional
 
 from .abstract_provider import ProviderSpecific
diff --git a/src/sdfs/providers/aws.py b/src/sdfs/providers/aws.py
index 0eb5a3c..faed1db 100644
--- a/src/sdfs/providers/aws.py
+++ b/src/sdfs/providers/aws.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """AwsSpecific."""
+
 from typing import Optional
 
 from .abstract_provider import ProviderSpecific
diff --git a/src/sdfs/providers/azure.py b/src/sdfs/providers/azure.py
index 0ea7719..0f92515 100644
--- a/src/sdfs/providers/azure.py
+++ b/src/sdfs/providers/azure.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """AzureSpecific."""
+
 from typing import Optional
 
 from .abstract_provider import ProviderSpecific
diff --git a/src/sdfs/providers/factory.py b/src/sdfs/providers/factory.py
index e321ca6..6097c5d 100644
--- a/src/sdfs/providers/factory.py
+++ b/src/sdfs/providers/factory.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """ProviderSpecificFactory."""
+
 from typing import TYPE_CHECKING, Optional
 
 from sdfs.exceptions import ServiceProviderCodeNotSpecifiedError, ValidationError
diff --git a/src/sdfs/providers/google.py b/src/sdfs/providers/google.py
index 019b902..b0e06f8 100644
--- a/src/sdfs/providers/google.py
+++ b/src/sdfs/providers/google.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """GoogleSpecific."""
+
 from os import path
 from typing import Optional
 
diff --git a/src/sdfs/providers/ibm.py b/src/sdfs/providers/ibm.py
index 8ffc9ef..0534e23 100644
--- a/src/sdfs/providers/ibm.py
+++ b/src/sdfs/providers/ibm.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """IbmSpecific."""
+
 from typing import Optional
 
 from .abstract_provider import ProviderSpecific
diff --git a/version.py b/version.py
index fa6f974..19b31a1 100644
--- a/version.py
+++ b/version.py
@@ -30,7 +30,7 @@ def prepare_version() -> str:
         # else, if there are no COMMIT_SHA and BUILD_ID, we assume it is a local build
         try:
             commit = os.environ["BUILD_COMMIT_SHORT_SHA"]
-            #build_id = os.environ["BUILD_ID"]
+            build_id = os.environ["BUILD_ID"]
             branch_name = os.environ["CI_COMMIT_BRANCH"]
             default_branch_name = os.environ["CI_DEFAULT_BRANCH"]
             merge_request = os.environ["CI_PIPELINE_SOURCE"]
@@ -39,13 +39,12 @@ def prepare_version() -> str:
             if branch_name == default_branch_name:
                 release_identifier = "rc"
             elif merge_request == event:
-                release_identifier = "dev"
+                release_identifier = ".dev"
             else:
-                release_identifier = "dev"
+                release_identifier = ".dev"
 
             version = (
-                f"{get_version_from_file()}{release_identifier}+{commit}"
-                #f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
+                f"{get_version_from_file()}{release_identifier}{build_id}+{commit}"
             )
 
         except KeyError:
-- 
GitLab


From 79145fc0187e63301a1eb8deb0c0b3202c9689f2 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 01:17:58 +0200
Subject: [PATCH 029/111] test

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 618dc71..3985de8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,6 +7,7 @@ include:
 stages:
   - lint
   - unit_tests
+  - scan
   - build
 
 
-- 
GitLab


From e037ff4be32a4db2b82b429e63aa2cd83ce1ef57 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 01:20:27 +0200
Subject: [PATCH 030/111] test

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3985de8..e0caa03 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -46,7 +46,7 @@ linter:
   allow_failure: true #allow job failure gitlab
 
 
-unit_tests:
+compile-and-unit-test:
   stage: unit_tests
   image: python:3.11
   before_script:
-- 
GitLab


From 486cbd1f8649929a0c8c5329b46808ef6826b3bb Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 01:26:43 +0200
Subject: [PATCH 031/111] test

---
 docs/conf.py                                  | 32 +++++++++----------
 src/sdfs/utils/__init__.py                    | 31 +++++++++---------
 src/sdfs/utils/http_utils.py                  | 32 +++++++++----------
 src/sdfs/utils/validators.py                  | 32 +++++++++----------
 tests/__init__.py                             | 31 +++++++++---------
 tests/integration/__init__.py                 | 31 +++++++++---------
 tests/integration/conftest.py                 | 32 +++++++++----------
 .../test_sd_file_system_integration.py        | 32 +++++++++----------
 .../test_seismic_dms_client_integration.py    | 32 +++++++++----------
 .../test_single_file_sd_integration.py        | 32 +++++++++----------
 tests/test_retry_flow.py                      | 32 +++++++++----------
 tests/unit/__init__.py                        | 31 +++++++++---------
 tests/unit/providers/__init__.py              | 31 +++++++++---------
 tests/unit/providers/conftest.py              | 32 +++++++++----------
 tests/unit/providers/test_factory.py          | 32 +++++++++----------
 tests/unit/providers/test_google_provider.py  | 32 +++++++++----------
 tests/unit/sd_file_system_mocks.py            | 31 +++++++++---------
 tests/unit/seismic_dms_client_mocks.py        | 32 +++++++++----------
 tests/unit/singleton.py                       | 31 +++++++++---------
 tests/unit/test_sd_file_system.py             | 32 +++++++++----------
 tests/unit/test_seismic_dms_client.py         | 32 +++++++++----------
 21 files changed, 329 insertions(+), 336 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index cf2549a..51fef6b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Sphinx configuration."""
+
 project = "SDFS"
 author = "EPAM"
 copyright = "2023, EPAM"  # noqa: A001
diff --git a/src/sdfs/utils/__init__.py b/src/sdfs/utils/__init__.py
index 92a6ccf..68e2241 100644
--- a/src/sdfs/utils/__init__.py
+++ b/src/sdfs/utils/__init__.py
@@ -1,18 +1,17 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Utils."""
diff --git a/src/sdfs/utils/http_utils.py b/src/sdfs/utils/http_utils.py
index 3ac1ba3..c9eeb8e 100644
--- a/src/sdfs/utils/http_utils.py
+++ b/src/sdfs/utils/http_utils.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Utils for improve python request library user experience."""
+
 import enum
 
 UNAUTHORIZED_CODE = 401
diff --git a/src/sdfs/utils/validators.py b/src/sdfs/utils/validators.py
index ae2eac3..787102b 100644
--- a/src/sdfs/utils/validators.py
+++ b/src/sdfs/utils/validators.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Common validators."""
+
 from urllib.parse import urlparse
 
 
diff --git a/tests/__init__.py b/tests/__init__.py
index aadbd4e..3fc15f6 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,18 +1,17 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Test suite for the SDFS package."""
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index 8eadb73..17dfb96 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -1,18 +1,17 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Integration tests for the SDFS package."""
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 46f99e3..4e77ad6 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Integration tests shared fixtures."""
+
 import logging
 import os
 from pathlib import Path
diff --git a/tests/integration/test_sd_file_system_integration.py b/tests/integration/test_sd_file_system_integration.py
index 1b5dfc2..da1286f 100644
--- a/tests/integration/test_sd_file_system_integration.py
+++ b/tests/integration/test_sd_file_system_integration.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestSDFileSystemIntegration."""
+
 import json
 import logging
 import os
diff --git a/tests/integration/test_seismic_dms_client_integration.py b/tests/integration/test_seismic_dms_client_integration.py
index b2cacb1..6cbd8e4 100644
--- a/tests/integration/test_seismic_dms_client_integration.py
+++ b/tests/integration/test_seismic_dms_client_integration.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestSeismicDmsClientIntegration."""
+
 import os
 
 import pytest
diff --git a/tests/integration/test_single_file_sd_integration.py b/tests/integration/test_single_file_sd_integration.py
index 6ebbfdd..6eb0365 100644
--- a/tests/integration/test_single_file_sd_integration.py
+++ b/tests/integration/test_single_file_sd_integration.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestSDFileSystemIntegration."""
+
 import json
 import logging
 import os
diff --git a/tests/test_retry_flow.py b/tests/test_retry_flow.py
index 8ba3ae5..e072d5f 100644
--- a/tests/test_retry_flow.py
+++ b/tests/test_retry_flow.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Retry flow integration test."""
+
 import logging
 import os
 import time
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index 17b9af9..589eeec 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -1,18 +1,17 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Unit tests for the SDFS package."""
diff --git a/tests/unit/providers/__init__.py b/tests/unit/providers/__init__.py
index 1476c6a..166a785 100644
--- a/tests/unit/providers/__init__.py
+++ b/tests/unit/providers/__init__.py
@@ -1,18 +1,17 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Unit tests for providers package."""
diff --git a/tests/unit/providers/conftest.py b/tests/unit/providers/conftest.py
index fa5f000..c317819 100644
--- a/tests/unit/providers/conftest.py
+++ b/tests/unit/providers/conftest.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Providers tests shared fixtures."""
+
 from typing import Generator
 from unittest import mock
 
diff --git a/tests/unit/providers/test_factory.py b/tests/unit/providers/test_factory.py
index 1f9f529..38cfac9 100644
--- a/tests/unit/providers/test_factory.py
+++ b/tests/unit/providers/test_factory.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestProviderSpecificFactory."""
+
 from unittest import mock
 
 import pytest
diff --git a/tests/unit/providers/test_google_provider.py b/tests/unit/providers/test_google_provider.py
index 08a146e..af1346a 100644
--- a/tests/unit/providers/test_google_provider.py
+++ b/tests/unit/providers/test_google_provider.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestGoogleSpecific."""
+
 from pathlib import Path
 from unittest import mock
 
diff --git a/tests/unit/sd_file_system_mocks.py b/tests/unit/sd_file_system_mocks.py
index 649ec34..ccff316 100644
--- a/tests/unit/sd_file_system_mocks.py
+++ b/tests/unit/sd_file_system_mocks.py
@@ -1,19 +1,18 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Test MockFinalFileSystem for SDFS package."""
 
diff --git a/tests/unit/seismic_dms_client_mocks.py b/tests/unit/seismic_dms_client_mocks.py
index ed7c87d..9ed4fac 100644
--- a/tests/unit/seismic_dms_client_mocks.py
+++ b/tests/unit/seismic_dms_client_mocks.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Test MockResponse for SDFS package."""
+
 from typing import Any
 
 MOCK_REFRESH_TOKEN_RESPONSE_DATA = {
diff --git a/tests/unit/singleton.py b/tests/unit/singleton.py
index 5a94c02..55f3c99 100644
--- a/tests/unit/singleton.py
+++ b/tests/unit/singleton.py
@@ -1,19 +1,18 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """Singleton."""
 
diff --git a/tests/unit/test_sd_file_system.py b/tests/unit/test_sd_file_system.py
index 19fb686..c9bd004 100644
--- a/tests/unit/test_sd_file_system.py
+++ b/tests/unit/test_sd_file_system.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestSDFileSystem."""
+
 from typing import Callable
 from unittest import mock
 
diff --git a/tests/unit/test_seismic_dms_client.py b/tests/unit/test_seismic_dms_client.py
index 7ebc930..73d8c70 100644
--- a/tests/unit/test_seismic_dms_client.py
+++ b/tests/unit/test_seismic_dms_client.py
@@ -1,21 +1,21 @@
-"""
-* Copyright 2023-2024, TGS
-* Copyright 2023-2024, EPAM
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*      http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-"""
+# Copyright 2023-2024, TGS
+# Copyright 2023-2024, EPAM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 
 """TestSeismicDmsClient."""
+
 from typing import Callable
 from unittest import mock
 
-- 
GitLab


From 06d144157723acf5c9445923fc8d1ce2bb2bce9c Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 01:29:32 +0200
Subject: [PATCH 032/111] test

---
 src/sdfs/clients/__init__.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/sdfs/clients/__init__.py b/src/sdfs/clients/__init__.py
index b855077..513ab06 100644
--- a/src/sdfs/clients/__init__.py
+++ b/src/sdfs/clients/__init__.py
@@ -13,3 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
+"""Clients."""
-- 
GitLab


From a8a8c7771c5daa74a43dde926bbf86b6d85f4d42 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 12:10:51 +0200
Subject: [PATCH 033/111] test

---
 .gitlab-ci.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e0caa03..b855688 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,8 @@
 include:
+  # ultimate
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/gitlab-ultimate.yml"
+
   # fossa
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/fossa-python.yml"
-- 
GitLab


From c88db7c0ff997affd2b97dbe824a0378a1c8b7f5 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 15:45:07 +0200
Subject: [PATCH 034/111] test

---
 .gitlab-ci.yml | 33 ++++++++++++++++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b855688..a0b53f3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,6 +12,7 @@ stages:
   - lint
   - unit_tests
   - scan
+  - integration_test
   - build
 
 
@@ -95,4 +96,34 @@ publish-package:
 #    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
 #    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
 #      when: never
-#    - if: $CI_COMMIT_TAG
\ No newline at end of file
+#    - if: $CI_COMMIT_TAG
+
+integration_tests:
+  stage: integration_tests
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - |
+      export ACCESS_TOKEN=$(curl -X POST ${AUTH_REFRESH_TOKEN_UR} \
+      --header "Content-Type: application/x-www-form-urlencoded" \
+      --data "grant_type=refresh_token" \
+      --data "refresh_token=${REFRESH_TOKEN}" \
+      --data "client_id=${CLIENT_ID}" \
+      --data "client_secret=${CLIENT_SECRET}" \
+      | jq -r '.id_token')
+    - export SD_PATH=${SD_PATH};
+    - echo "${ACCESS_TOKEN}" | head -c 10
+    - export AUTH_REFRESH_TOKEN_URL=$AUTH_REFRESH_TOKEN_UR
+    - export REFRESH_TOKEN=$REFRESH_TOKEN
+    - export CLIENT_ID=$CLIENT_ID
+    - export CLIENT_SECRET=$CLIENT_SECRET
+    - poetry install
+    - nox --session=integration-tests-3.11
+  allow_failure: true
\ No newline at end of file
-- 
GitLab


From b220f48c004a590f817ef333a3f1fd151d15362b Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 15:46:39 +0200
Subject: [PATCH 035/111] test

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a0b53f3..cea55db 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,7 +12,7 @@ stages:
   - lint
   - unit_tests
   - scan
-  - integration_test
+  - integration_tests
   - build
 
 
-- 
GitLab


From 27a24d32f632b09dd231df22dd86b6a00d0cc794 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 27 Dec 2024 16:34:05 +0200
Subject: [PATCH 036/111] include standard-setup

---
 .gitlab-ci.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index cea55db..27ac46f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,8 @@
 include:
+  # PIPELINE LOGIC
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "standard-setup.yml"
+
   # ultimate
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/gitlab-ultimate.yml"
-- 
GitLab


From decedf791074248f165090cbc3a9a5e693da1d65 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 19:08:37 +0200
Subject: [PATCH 037/111] test ci

---
 .gitlab-ci.yml | 131 +++++--------------------------------------------
 1 file changed, 11 insertions(+), 120 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 27ac46f..572c7bb 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,6 +3,14 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "standard-setup.yml"
 
+  # linter
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-linters.yml"
+
+  # BUILD
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python.yml"
+
   # ultimate
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/gitlab-ultimate.yml"
@@ -11,123 +19,6 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/fossa-python.yml"
 
-
-stages:
-  - lint
-  - unit_tests
-  - scan
-  - integration_tests
-  - build
-
-
-variables:
-  BUILD_TAG: $CI_COMMIT_TAG
-  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
-  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
-  BUILD_ID: $CI_PIPELINE_IID
-
-
-.install-deps:
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry
-    - pip install nox nox-poetry
-    - poetry --version
-
-
-linter:
-  stage: lint
-  image: python:3.11
-  #extends: .install-deps
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - poetry install
-    - poetry run ruff check .
-  allow_failure: true #allow job failure gitlab
-
-
-compile-and-unit-test:
-  stage: unit_tests
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - poetry install
-    - nox --session=unit-tests-3.11
-
-
-publish-package:
-  stage: build
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - VERSION=$(python version.py)
-    - echo ${VERSION}
-    - |
-       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
-    - echo ${CURRENT_VERSION}
-    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
-    - poetry build
-    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
-    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
-    - poetry publish --repository gitlab | tee output.txt
-    - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
-#  rules:
-#    - if: '$CI_COMMIT_BRANCH'
-#    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
-#    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
-#      when: never
-#    - if: $CI_COMMIT_TAG
-
-integration_tests:
-  stage: integration_tests
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - |
-      export ACCESS_TOKEN=$(curl -X POST ${AUTH_REFRESH_TOKEN_UR} \
-      --header "Content-Type: application/x-www-form-urlencoded" \
-      --data "grant_type=refresh_token" \
-      --data "refresh_token=${REFRESH_TOKEN}" \
-      --data "client_id=${CLIENT_ID}" \
-      --data "client_secret=${CLIENT_SECRET}" \
-      | jq -r '.id_token')
-    - export SD_PATH=${SD_PATH};
-    - echo "${ACCESS_TOKEN}" | head -c 10
-    - export AUTH_REFRESH_TOKEN_URL=$AUTH_REFRESH_TOKEN_UR
-    - export REFRESH_TOKEN=$REFRESH_TOKEN
-    - export CLIENT_ID=$CLIENT_ID
-    - export CLIENT_SECRET=$CLIENT_SECRET
-    - poetry install
-    - nox --session=integration-tests-3.11
-  allow_failure: true
\ No newline at end of file
+  # deply
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
-- 
GitLab


From ad8befac549058cb4b376444315753c5ba01f7c2 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 19:25:06 +0200
Subject: [PATCH 038/111] test ci

---
 .gitlab-ci.yml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 572c7bb..bb76e9e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,3 +22,18 @@ include:
   # deply
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
+
+
+compile-and-unit-test:
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - nox --session=unit-tests-3.11
\ No newline at end of file
-- 
GitLab


From b4d9d4363da94b3c40040cf500cb96de8f4403dd Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 21:47:27 +0200
Subject: [PATCH 039/111] test ci

---
 .gitlab-ci.yml | 29 ++++++++++++++++++++++++++++-
 1 file changed, 28 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bb76e9e..05877db 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -36,4 +36,31 @@ compile-and-unit-test:
     - poetry --version
   script:
     - poetry install
-    - nox --session=unit-tests-3.11
\ No newline at end of file
+    - nox --session=unit-tests-3.11
+
+
+publish-package:
+  tags: ["osdu-small"]
+  stage: deploy
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+      CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
-- 
GitLab


From 973f74126e0c02479a6dc5d76592a00022968faf Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:16:39 +0200
Subject: [PATCH 040/111] test ci

---
 .gitlab-ci.yml | 101 +++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 80 insertions(+), 21 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 05877db..95a062e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,14 +3,14 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "standard-setup.yml"
 
-  # linter
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-linters.yml"
-
-  # BUILD
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python.yml"
-
+#  # linter
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-linters.yml"
+#
+#  # BUILD
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python.yml"
+#
   # ultimate
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/gitlab-ultimate.yml"
@@ -19,43 +19,102 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/fossa-python.yml"
 
-  # deply
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-package.yml"
+#  # deply
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-package.yml"
 
 
+variables:
+  BUILD_TAG: $CI_COMMIT_TAG
+  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
+  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
+  BUILD_ID: $CI_PIPELINE_IID
+
+stages:
+  - lint
+  - unit_tests
+  - scan
+  - integration_tests
+  - build
+
+.install-deps:
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install poetry
+    - pip install nox nox-poetry
+    - poetry --version
+
+linter:
+  stage: lint
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install poetry nox nox-poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run ruff check .
+  allow_failure: true
+
 compile-and-unit-test:
+  stage: unit_tests
   image: python:3.11
   before_script:
     - apt-get update
     - apt-get install -y jq curl
     - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
+    - pip install poetry nox nox-poetry
     - poetry --version
   script:
     - poetry install
     - nox --session=unit-tests-3.11
 
+integration_tests:
+  stage: integration_tests
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install poetry nox nox-poetry
+    - poetry --version
+  script:
+    - |
+      export ACCESS_TOKEN=$(curl -X POST ${AUTH_REFRESH_TOKEN_UR} \
+      --header "Content-Type: application/x-www-form-urlencoded" \
+      --data "grant_type=refresh_token" \
+      --data "refresh_token=${REFRESH_TOKEN}" \
+      --data "client_id=${CLIENT_ID}" \
+      --data "client_secret=${CLIENT_SECRET}" \
+      | jq -r '.id_token')
+    - export SD_PATH=${SD_PATH}
+    - echo "${ACCESS_TOKEN}" | head -c 10
+    - export AUTH_REFRESH_TOKEN_URL=$AUTH_REFRESH_TOKEN_UR
+    - export REFRESH_TOKEN=$REFRESH_TOKEN
+    - export CLIENT_ID=$CLIENT_ID
+    - export CLIENT_SECRET=$CLIENT_SECRET
+    - poetry install
+    - nox --session=integration-tests-3.11
+  allow_failure: true
 
 publish-package:
-  tags: ["osdu-small"]
-  stage: deploy
+  stage: build
   image: python:3.11
   before_script:
     - apt-get update
     - apt-get install -y jq curl
     - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
+    - pip install poetry nox nox-poetry
     - poetry --version
   script:
     - VERSION=$(python version.py)
     - echo ${VERSION}
     - |
-      CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
     - echo ${CURRENT_VERSION}
     - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
     - poetry build
@@ -63,4 +122,4 @@ publish-package:
     - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
     - poetry publish --repository gitlab | tee output.txt
     - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
+    - cat published_version.txt
\ No newline at end of file
-- 
GitLab


From 816fc523f70fdd9199f16544193e24e755a831d0 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:19:27 +0200
Subject: [PATCH 041/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 95a062e..153b4fc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -31,6 +31,7 @@ variables:
   BUILD_ID: $CI_PIPELINE_IID
 
 stages:
+  - review
   - lint
   - unit_tests
   - scan
-- 
GitLab


From 266e66c0e766c929e0b34fc3a7bf3b2ad73b0662 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:25:44 +0200
Subject: [PATCH 042/111] test ci

---
 .gitlab-ci.yml | 16 ----------------
 1 file changed, 16 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 153b4fc..393090d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -30,22 +30,6 @@ variables:
   BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
   BUILD_ID: $CI_PIPELINE_IID
 
-stages:
-  - review
-  - lint
-  - unit_tests
-  - scan
-  - integration_tests
-  - build
-
-.install-deps:
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry
-    - pip install nox nox-poetry
-    - poetry --version
 
 linter:
   stage: lint
-- 
GitLab


From bfbe36e98ee658ea1e24e906bc2859a81ab819e0 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:26:43 +0200
Subject: [PATCH 043/111] test ci

---
 .gitlab-ci.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 393090d..eeeb4ee 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,9 +3,9 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "standard-setup.yml"
 
-#  # linter
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-linters.yml"
+  # linter
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-linters.yml"
 #
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
-- 
GitLab


From ffb33b84f5106cb8dc8197e26c0a6909dc58801a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:28:13 +0200
Subject: [PATCH 044/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index eeeb4ee..ce732c6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -46,7 +46,7 @@ linter:
   allow_failure: true
 
 compile-and-unit-test:
-  stage: unit_tests
+  stage: compile-and-unit-test
   image: python:3.11
   before_script:
     - apt-get update
-- 
GitLab


From f898710e677ac8e6011f555fec45d1bf3c781741 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:33:36 +0200
Subject: [PATCH 045/111] test ci

---
 .gitlab-ci.yml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ce732c6..0fbd387 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,10 +7,10 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 #
-#  # BUILD
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python.yml"
-#
+  # BUILD
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python.yml"
+
   # ultimate
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/gitlab-ultimate.yml"
@@ -32,7 +32,7 @@ variables:
 
 
 linter:
-  stage: lint
+  stage: linter
   image: python:3.11
   before_script:
     - apt-get update
-- 
GitLab


From 1b74e9f32818b67fc066924bf2e93db5706fce50 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:36:06 +0200
Subject: [PATCH 046/111] test ci

---
 .gitlab-ci.yml | 111 ++++++-------------------------------------------
 1 file changed, 13 insertions(+), 98 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0fbd387..165211c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,107 +4,22 @@ include:
     file: "standard-setup.yml"
 
   # linter
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-linters.yml"
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-linters.yml"
 #
-  # BUILD
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python.yml"
-
-  # ultimate
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "scanners/gitlab-ultimate.yml"
-
-  # fossa
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "scanners/fossa-python.yml"
+#  # BUILD
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python.yml"
+#
+#  # ultimate
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "scanners/gitlab-ultimate.yml"
+#
+#  # fossa
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "scanners/fossa-python.yml"
 
 #  # deply
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
-
-variables:
-  BUILD_TAG: $CI_COMMIT_TAG
-  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
-  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
-  BUILD_ID: $CI_PIPELINE_IID
-
-
-linter:
-  stage: linter
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry nox nox-poetry
-    - poetry --version
-  script:
-    - poetry install
-    - poetry run ruff check .
-  allow_failure: true
-
-compile-and-unit-test:
-  stage: compile-and-unit-test
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry nox nox-poetry
-    - poetry --version
-  script:
-    - poetry install
-    - nox --session=unit-tests-3.11
-
-integration_tests:
-  stage: integration_tests
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry nox nox-poetry
-    - poetry --version
-  script:
-    - |
-      export ACCESS_TOKEN=$(curl -X POST ${AUTH_REFRESH_TOKEN_UR} \
-      --header "Content-Type: application/x-www-form-urlencoded" \
-      --data "grant_type=refresh_token" \
-      --data "refresh_token=${REFRESH_TOKEN}" \
-      --data "client_id=${CLIENT_ID}" \
-      --data "client_secret=${CLIENT_SECRET}" \
-      | jq -r '.id_token')
-    - export SD_PATH=${SD_PATH}
-    - echo "${ACCESS_TOKEN}" | head -c 10
-    - export AUTH_REFRESH_TOKEN_URL=$AUTH_REFRESH_TOKEN_UR
-    - export REFRESH_TOKEN=$REFRESH_TOKEN
-    - export CLIENT_ID=$CLIENT_ID
-    - export CLIENT_SECRET=$CLIENT_SECRET
-    - poetry install
-    - nox --session=integration-tests-3.11
-  allow_failure: true
-
-publish-package:
-  stage: build
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install poetry nox nox-poetry
-    - poetry --version
-  script:
-    - VERSION=$(python version.py)
-    - echo ${VERSION}
-    - |
-       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
-    - echo ${CURRENT_VERSION}
-    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
-    - poetry build
-    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
-    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
-    - poetry publish --repository gitlab | tee output.txt
-    - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
\ No newline at end of file
-- 
GitLab


From b61ad403b753a4cca189244af780c4bde15d3631 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:39:22 +0200
Subject: [PATCH 047/111] test ci

---
 .gitlab-ci.yml | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 165211c..7e9c524 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -23,3 +23,16 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
+compile-and-unit-test:
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - nox --session=unit-tests-3.11
\ No newline at end of file
-- 
GitLab


From 62fbd2e6264a3016daab94d0bf72bc34b32f5dd3 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:41:15 +0200
Subject: [PATCH 048/111] test ci

---
 .gitlab-ci.yml | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7e9c524..230b471 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,10 +3,9 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "standard-setup.yml"
 
-  # linter
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-linters.yml"
-#
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-linters.yml"
+
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python.yml"
-- 
GitLab


From 1a842097bd6e503f4a73699d226a0b3fb0884656 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:44:07 +0200
Subject: [PATCH 049/111] test ci

---
 .gitlab-ci.yml | 19 ++++---------------
 1 file changed, 4 insertions(+), 15 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 230b471..2cf963b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -19,19 +19,8 @@ include:
 #    file: "scanners/fossa-python.yml"
 
 #  # deply
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-package.yml"
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
 
-compile-and-unit-test:
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - poetry install
-    - nox --session=unit-tests-3.11
\ No newline at end of file
+python-static-analysis:
+  allow_failure: true
-- 
GitLab


From 23a37dd79f35d87188201e2c8b250cb1b7758201 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:44:48 +0200
Subject: [PATCH 050/111] test ci

---
 .gitlab-ci.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2cf963b..48b7f75 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -18,9 +18,9 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
-#  # deply
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-package.yml"
+##  # deply
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-package.yml"
 
 python-static-analysis:
   allow_failure: true
-- 
GitLab


From 0244435d88f48ae05c5c301a79e47a9804c26181 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:45:52 +0200
Subject: [PATCH 051/111] test ci

---
 .gitlab-ci.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 48b7f75..6217fdd 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,7 @@
 include:
-  # PIPELINE LOGIC
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "standard-setup.yml"
+#  # PIPELINE LOGIC
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "standard-setup.yml"
 
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
-- 
GitLab


From f10bd195e7d6f8e3534e26f8343b5d6d962a4f0d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:47:17 +0200
Subject: [PATCH 052/111] test ci

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6217fdd..f2a06ff 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,6 +2,8 @@ include:
 #  # PIPELINE LOGIC
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "standard-setup.yml"
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
 
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
-- 
GitLab


From 25ac85bf8c94fff7702e25738ae417e421b3ca4e Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:48:48 +0200
Subject: [PATCH 053/111] test ci

---
 .gitlab-ci.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f2a06ff..3618f88 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -24,5 +24,10 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
+stages:
+  - linters
+  - test
+
+
 python-static-analysis:
   allow_failure: true
-- 
GitLab


From f719decdd126f5cc49015efff16c1b7f44606c72 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:49:32 +0200
Subject: [PATCH 054/111] test ci

---
 .gitlab-ci.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3618f88..ae129da 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,8 +2,6 @@ include:
 #  # PIPELINE LOGIC
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "standard-setup.yml"
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-package.yml"
 
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
-- 
GitLab


From 76c844a875a80a895b1ae15ebebf6e5474f5fb15 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 22:58:01 +0200
Subject: [PATCH 055/111] test ci

---
 .gitlab-ci.yml | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ae129da..7c4e7e7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,5 +27,18 @@ stages:
   - test
 
 
-python-static-analysis:
+pylint:
+  tags: ["osdu-small"]
+  image: python:3.11
   allow_failure: true
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run ruff check .
-- 
GitLab


From 8f2e12e73f20c100f08be4f7c7d07f83c7874fdc Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 23:07:00 +0200
Subject: [PATCH 056/111] test ci

---
 .gitlab-ci.yml | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7c4e7e7..52349a3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -26,6 +26,22 @@ stages:
   - linters
   - test
 
+isort:
+  tags: ["osdu-small"]
+  image: python:3.11
+  allow_failure: true
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run isort . --check-only
+
 
 pylint:
   tags: ["osdu-small"]
-- 
GitLab


From 94d3d16d7ba81eb146124216be75c80712469d16 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 23:12:55 +0200
Subject: [PATCH 057/111] test ci

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 52349a3..c6d6794 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -40,6 +40,8 @@ isort:
     - poetry --version
   script:
     - poetry install
+    - poetry add --dev isort
+    - poetry show isort
     - poetry run isort . --check-only
 
 
-- 
GitLab


From e612f238f41eff8f4deb6fcba1eefa5a7648e63e Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sat, 28 Dec 2024 23:20:16 +0200
Subject: [PATCH 058/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c6d6794..1f3dac3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -42,7 +42,7 @@ isort:
     - poetry install
     - poetry add --dev isort
     - poetry show isort
-    - poetry run isort . --check-only
+    - poetry run isort .
 
 
 pylint:
-- 
GitLab


From 55f1cda431d7a0deaf401c8a334ddb0ca57e99b1 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:03:45 +0200
Subject: [PATCH 059/111] test ci

---
 .gitlab-ci.yml | 21 ++++++---------------
 1 file changed, 6 insertions(+), 15 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1f3dac3..aa23340 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,22 +27,13 @@ stages:
   - test
 
 isort:
-  tags: ["osdu-small"]
-  image: python:3.11
-  allow_failure: true
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
+  needs: [ ]
+  extends: [ ]
   script:
-    - poetry install
-    - poetry add --dev isort
-    - poetry show isort
-    - poetry run isort .
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
 
 
 pylint:
-- 
GitLab


From cbb1c3c877f156c4b8986ad087da2de6467e1e31 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:04:59 +0200
Subject: [PATCH 060/111] test ci

---
 .gitlab-ci.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index aa23340..f7aa99b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -35,6 +35,14 @@ isort:
     - $DISABLED == 'true'
 
 
+python-static-analysis:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
 
 pylint:
   tags: ["osdu-small"]
-- 
GitLab


From 9ebb05c8689d831b080e353d17d7baac9298016d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:11:30 +0200
Subject: [PATCH 061/111] test ci

---
 .gitlab-ci.yml | 42 +++++++++++++++++++++++++++++++++++-------
 1 file changed, 35 insertions(+), 7 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f7aa99b..315765b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,8 +1,9 @@
 include:
-#  # PIPELINE LOGIC
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "standard-setup.yml"
+  # PIPELINE LOGIC
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "standard-setup.yml"
 
+  # linters
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
@@ -18,13 +19,16 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
-##  # deply
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-package.yml"
+#  # deply
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
+
 
 stages:
   - linters
   - test
+  - deploy
+
 
 isort:
   needs: [ ]
@@ -45,7 +49,6 @@ python-static-analysis:
 
 
 pylint:
-  tags: ["osdu-small"]
   image: python:3.11
   allow_failure: true
   before_script:
@@ -59,3 +62,28 @@ pylint:
   script:
     - poetry install
     - poetry run ruff check .
+
+
+publish-package:
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
\ No newline at end of file
-- 
GitLab


From 386553a1eb3c0846ce2cb967516af0c68967860d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:12:32 +0200
Subject: [PATCH 062/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 315765b..a6d564c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -25,6 +25,7 @@ include:
 
 
 stages:
+  - review
   - linters
   - test
   - deploy
-- 
GitLab


From 61e606677a0e3363898394ccb02268e45e930578 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:18:01 +0200
Subject: [PATCH 063/111] test ci

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a6d564c..1c243ea 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -50,6 +50,7 @@ python-static-analysis:
 
 
 pylint:
+  tags: ['osdu-small']
   image: python:3.11
   allow_failure: true
   before_script:
@@ -66,6 +67,7 @@ pylint:
 
 
 publish-package:
+  tags: ['osdu-small']
   image: python:3.11
   before_script:
     - apt-get update
-- 
GitLab


From 4218919cf214cb3f93844e18afafd46b92a8ad11 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:44:54 +0200
Subject: [PATCH 064/111] test ci

---
 .gitlab-ci.yml                   | 130 ++++++++++++++++---------------
 devops/osdu/pipeline/deploy.yml  |  30 +++++++
 devops/osdu/pipeline/linters.yml |  32 ++++++++
 3 files changed, 129 insertions(+), 63 deletions(-)
 create mode 100644 devops/osdu/pipeline/deploy.yml
 create mode 100644 devops/osdu/pipeline/linters.yml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1c243ea..e61395f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,6 +7,8 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
+  - local: "/devops/osdu/pipeline/linters.yml"
+
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python.yml"
@@ -23,70 +25,72 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-
-stages:
-  - review
-  - linters
-  - test
-  - deploy
-
-
-isort:
-  needs: [ ]
-  extends: [ ]
-  script:
-    echo 'empty'
-  only:
-    - $DISABLED == 'true'
-
-
-python-static-analysis:
-  needs: [ ]
-  extends: [ ]
-  script:
-    echo 'empty'
-  only:
-    - $DISABLED == 'true'
+  - local: "devops/osdu/pipeline/deploy.yml"
 
 
-pylint:
-  tags: ['osdu-small']
-  image: python:3.11
-  allow_failure: true
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - poetry install
-    - poetry run ruff check .
+#stages:
+#  - review
+#  - linters
+#  - test
+#  - deploy
 
 
-publish-package:
-  tags: ['osdu-small']
-  image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - VERSION=$(python version.py)
-    - echo ${VERSION}
-    - |
-       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
-    - echo ${CURRENT_VERSION}
-    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
-    - poetry build
-    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
-    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
-    - poetry publish --repository gitlab | tee output.txt
-    - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
\ No newline at end of file
+#isort:
+#  needs: [ ]
+#  extends: [ ]
+#  script:
+#    echo 'empty'
+#  only:
+#    - $DISABLED == 'true'
+#
+#
+#python-static-analysis:
+#  needs: [ ]
+#  extends: [ ]
+#  script:
+#    echo 'empty'
+#  only:
+#    - $DISABLED == 'true'
+#
+#
+#pylint:
+#  tags: ['osdu-small']
+#  image: python:3.11
+#  allow_failure: true
+#  before_script:
+#    - apt-get update
+#    - apt-get install -y jq curl
+#    - pip install --upgrade pip
+#    - pip install nox
+#    - pip install nox-poetry
+#    - pip install poetry
+#    - poetry --version
+#  script:
+#    - poetry install
+#    - poetry run ruff check .
+#
+#
+#publish-package:
+#  tags: ['osdu-small']
+#  image: python:3.11
+#  before_script:
+#    - apt-get update
+#    - apt-get install -y jq curl
+#    - pip install --upgrade pip
+#    - pip install nox
+#    - pip install nox-poetry
+#    - pip install poetry
+#    - poetry --version
+#  script:
+#    - VERSION=$(python version.py)
+#    - echo ${VERSION}
+#    - |
+#       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+#    - echo ${CURRENT_VERSION}
+#    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+#    - poetry build
+#    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+#    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+#    - poetry publish --repository gitlab | tee output.txt
+#    - echo ${VERSION} > published_version.txt
+#    - cat published_version.txt
\ No newline at end of file
diff --git a/devops/osdu/pipeline/deploy.yml b/devops/osdu/pipeline/deploy.yml
new file mode 100644
index 0000000..e19c356
--- /dev/null
+++ b/devops/osdu/pipeline/deploy.yml
@@ -0,0 +1,30 @@
+variables:
+  BUILD_TAG: $CI_COMMIT_TAG
+  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
+  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
+  BUILD_ID: $CI_PIPELINE_IID
+
+
+publish-package:
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
\ No newline at end of file
diff --git a/devops/osdu/pipeline/linters.yml b/devops/osdu/pipeline/linters.yml
new file mode 100644
index 0000000..1563a7d
--- /dev/null
+++ b/devops/osdu/pipeline/linters.yml
@@ -0,0 +1,32 @@
+isort:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+python-static-analysis:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+pylint:
+  image: python:3.11
+  allow_failure: true
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run ruff check .
-- 
GitLab


From 87234259a2b51e7c4a54e9557f29bf1e72a6e67a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:56:04 +0200
Subject: [PATCH 065/111] test ci

---
 .gitlab-ci.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e61395f..5a70a67 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -25,7 +25,10 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-  - local: "devops/osdu/pipeline/deploy.yml"
+  - local: "/devops/osdu/pipeline/deploy.yml"
+
+fossa-analyze:
+  image: $CI_REGISTRY/divido/fossa-with-cache:v0.9-jdk17
 
 
 #stages:
-- 
GitLab


From 877f67fde1f1faa1c86a24da113f816d02dcca98 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:56:40 +0200
Subject: [PATCH 066/111] test ci

---
 .gitlab-ci.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5a70a67..051d5d0 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,8 +27,6 @@ include:
 
   - local: "/devops/osdu/pipeline/deploy.yml"
 
-fossa-analyze:
-  image: $CI_REGISTRY/divido/fossa-with-cache:v0.9-jdk17
 
 
 #stages:
-- 
GitLab


From 5e1d835968bde59e9406497247335d3681c7f2cd Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 00:58:27 +0200
Subject: [PATCH 067/111] test ci

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 051d5d0..9c4384e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,7 +7,7 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
-  - local: "/devops/osdu/pipeline/linters.yml"
+  - local: "devops/osdu/pipeline/linters.yml"
 
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
@@ -25,7 +25,7 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-  - local: "/devops/osdu/pipeline/deploy.yml"
+  - local: "devops/osdu/pipeline/deploy.yml"
 
 
 
-- 
GitLab


From 4815667025ad59d3953f52c710006aaa56375072 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 01:03:37 +0200
Subject: [PATCH 068/111] test ci

---
 .gitlab-ci.yml                                |  6 ++--
 devops/osdu/pipeline/linters.yml              | 32 ------------------
 .../{deploy.yml => override-stages.yml}       | 33 +++++++++++++++++++
 3 files changed, 36 insertions(+), 35 deletions(-)
 delete mode 100644 devops/osdu/pipeline/linters.yml
 rename devops/osdu/pipeline/{deploy.yml => override-stages.yml} (65%)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9c4384e..31d85d4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -5,9 +5,8 @@ include:
 
   # linters
   - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-linters.yml"
+    file: "build/python-override-stages.yml"
 
-  - local: "devops/osdu/pipeline/linters.yml"
 
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
@@ -25,7 +24,8 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-  - local: "devops/osdu/pipeline/deploy.yml"
+  - local: "devops/osdu/pipeline/override-stages.yml"
+
 
 
 
diff --git a/devops/osdu/pipeline/linters.yml b/devops/osdu/pipeline/linters.yml
deleted file mode 100644
index 1563a7d..0000000
--- a/devops/osdu/pipeline/linters.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-isort:
-  needs: [ ]
-  extends: [ ]
-  script:
-    echo 'empty'
-  only:
-    - $DISABLED == 'true'
-
-
-python-static-analysis:
-  needs: [ ]
-  extends: [ ]
-  script:
-    echo 'empty'
-  only:
-    - $DISABLED == 'true'
-
-
-pylint:
-  image: python:3.11
-  allow_failure: true
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
-  script:
-    - poetry install
-    - poetry run ruff check .
diff --git a/devops/osdu/pipeline/deploy.yml b/devops/osdu/pipeline/override-stages.yml
similarity index 65%
rename from devops/osdu/pipeline/deploy.yml
rename to devops/osdu/pipeline/override-stages.yml
index e19c356..8b9c89c 100644
--- a/devops/osdu/pipeline/deploy.yml
+++ b/devops/osdu/pipeline/override-stages.yml
@@ -1,3 +1,36 @@
+isort:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+python-static-analysis:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+pylint:
+  image: python:3.11
+  allow_failure: true
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run ruff check .
+
 variables:
   BUILD_TAG: $CI_COMMIT_TAG
   CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
-- 
GitLab


From eb5cc369112dbb18394fd34de10c854e79b2a41a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 01:11:04 +0200
Subject: [PATCH 069/111] test ci

---
 .gitlab-ci.yml               |  6 ++--
 devops/osdu/build/python.yml | 63 ++++++++++++++++++++++++++++++++++++
 2 files changed, 66 insertions(+), 3 deletions(-)
 create mode 100644 devops/osdu/build/python.yml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 31d85d4..615b7ba 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -5,9 +5,9 @@ include:
 
   # linters
   - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-override-stages.yml"
-
+    file: "build/python-linters.yml"
 
+  - local: "devops/osdu/build/python.yml"
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python.yml"
@@ -24,7 +24,7 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-  - local: "devops/osdu/pipeline/override-stages.yml"
+  #- local: "devops/osdu/pipeline/override-stages.yml"
 
 
 
diff --git a/devops/osdu/build/python.yml b/devops/osdu/build/python.yml
new file mode 100644
index 0000000..8b9c89c
--- /dev/null
+++ b/devops/osdu/build/python.yml
@@ -0,0 +1,63 @@
+isort:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+python-static-analysis:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+pylint:
+  image: python:3.11
+  allow_failure: true
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - poetry install
+    - poetry run ruff check .
+
+variables:
+  BUILD_TAG: $CI_COMMIT_TAG
+  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
+  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
+  BUILD_ID: $CI_PIPELINE_IID
+
+
+publish-package:
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
\ No newline at end of file
-- 
GitLab


From 680414d07c4b5b9a43b1d47035d8b50576c92705 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 01:13:40 +0200
Subject: [PATCH 070/111] test ci

---
 .gitlab-ci.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 615b7ba..434d4be 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,7 +7,8 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
-  - local: "devops/osdu/build/python.yml"
+  - local: "devops/osdu/pipeline/override-stages.yml"
+
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python.yml"
-- 
GitLab


From fd5914f83035ff5cc0f99dbb0231a4deeecf5c46 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 01:18:16 +0200
Subject: [PATCH 071/111] test ci

---
 .gitlab-ci.yml                                 | 18 +++++++++---------
 ...{override-stages.yml => override-stage.yml} |  0
 2 files changed, 9 insertions(+), 9 deletions(-)
 rename devops/osdu/pipeline/{override-stages.yml => override-stage.yml} (100%)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 434d4be..432179d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,5 @@
 include:
+
   # PIPELINE LOGIC
   - project: "osdu/platform/ci-cd-pipelines"
     file: "standard-setup.yml"
@@ -7,7 +8,14 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
-  - local: "devops/osdu/pipeline/override-stages.yml"
+  # deply
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
+
+  - local: "devops/osdu/pipeline/override-stage.yml"
+
+  #- local: "devops/osdu/pipeline/override-stage.yml"
+
 
 #  # BUILD
 #  - project: "osdu/platform/ci-cd-pipelines"
@@ -21,14 +29,6 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
-#  # deply
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-package.yml"
-
-  #- local: "devops/osdu/pipeline/override-stages.yml"
-
-
-
 
 #stages:
 #  - review
diff --git a/devops/osdu/pipeline/override-stages.yml b/devops/osdu/pipeline/override-stage.yml
similarity index 100%
rename from devops/osdu/pipeline/override-stages.yml
rename to devops/osdu/pipeline/override-stage.yml
-- 
GitLab


From 76fd4efb7bd2a63166ed8ed3a8b29bfcd0003dc5 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Sun, 29 Dec 2024 02:11:53 +0200
Subject: [PATCH 072/111] test ci

---
 devops/osdu/build/python.yml | 68 ++++++++++++++++++++++++------------
 1 file changed, 45 insertions(+), 23 deletions(-)

diff --git a/devops/osdu/build/python.yml b/devops/osdu/build/python.yml
index 8b9c89c..f8bd46f 100644
--- a/devops/osdu/build/python.yml
+++ b/devops/osdu/build/python.yml
@@ -1,3 +1,30 @@
+variables:
+  BUILD_TAG: $CI_COMMIT_TAG
+  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
+  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
+  BUILD_ID: $CI_PIPELINE_IID
+
+.python:
+  image: python:3.11
+  tags:
+  - osdu-medium
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+
+
+".skipForTriggeringMergeRequests":
+  rules:
+  - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
+    when: never
+  - when: on_success
+
+
 isort:
   needs: [ ]
   extends: [ ]
@@ -19,35 +46,15 @@ python-static-analysis:
 pylint:
   image: python:3.11
   allow_failure: true
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
+  extends: .python
   script:
     - poetry install
     - poetry run ruff check .
 
-variables:
-  BUILD_TAG: $CI_COMMIT_TAG
-  CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH
-  BUILD_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
-  BUILD_ID: $CI_PIPELINE_IID
-
 
 publish-package:
   image: python:3.11
-  before_script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install --upgrade pip
-    - pip install nox
-    - pip install nox-poetry
-    - pip install poetry
-    - poetry --version
+  extends: .python
   script:
     - VERSION=$(python version.py)
     - echo ${VERSION}
@@ -60,4 +67,19 @@ publish-package:
     - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
     - poetry publish --repository gitlab | tee output.txt
     - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
\ No newline at end of file
+    - cat published_version.txt
+
+
+compile-and-unit-test:
+  stage: build
+  image: python:3.11
+  extends:
+    - ".python"
+    - ".skipForTriggeringMergeRequests"
+  script:
+    - poetry install
+    - nox --session=unit-tests-3.11
+  rules:
+    - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
+      when: never
+    - when: on_success
-- 
GitLab


From 76f8efb4cc2a01197bb124b775846811a04834ca Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 15:56:22 +0200
Subject: [PATCH 073/111] test ci

---
 .gitlab-ci.yml               | 14 +++++++-------
 devops/osdu/build/python.yml | 30 +++++++++++++++---------------
 2 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 432179d..3bbca93 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -21,13 +21,13 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python.yml"
 #
-#  # ultimate
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "scanners/gitlab-ultimate.yml"
-#
-#  # fossa
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "scanners/fossa-python.yml"
+  # ultimate
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/gitlab-ultimate.yml"
+
+  # fossa
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/fossa-python.yml"
 
 
 #stages:
diff --git a/devops/osdu/build/python.yml b/devops/osdu/build/python.yml
index f8bd46f..8c0ae2e 100644
--- a/devops/osdu/build/python.yml
+++ b/devops/osdu/build/python.yml
@@ -25,6 +25,21 @@ variables:
   - when: on_success
 
 
+compile-and-unit-test:
+  stage: build
+  image: python:3.11
+  extends:
+    - ".python"
+    - ".skipForTriggeringMergeRequests"
+  script:
+    - poetry install
+    - nox --session=unit-tests-3.11
+  rules:
+    - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
+      when: never
+    - when: on_success
+
+
 isort:
   needs: [ ]
   extends: [ ]
@@ -68,18 +83,3 @@ publish-package:
     - poetry publish --repository gitlab | tee output.txt
     - echo ${VERSION} > published_version.txt
     - cat published_version.txt
-
-
-compile-and-unit-test:
-  stage: build
-  image: python:3.11
-  extends:
-    - ".python"
-    - ".skipForTriggeringMergeRequests"
-  script:
-    - poetry install
-    - nox --session=unit-tests-3.11
-  rules:
-    - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
-      when: never
-    - when: on_success
-- 
GitLab


From 0f5b5a6f1a7e5cecf10a851be6a03126cae1f225 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:16:12 +0200
Subject: [PATCH 074/111] test ci

---
 .gitlab-ci.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3bbca93..4f21ac7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -17,10 +17,10 @@ include:
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
 
-#  # BUILD
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python.yml"
-#
+  # BUILD
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python.yml"
+
   # ultimate
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/gitlab-ultimate.yml"
@@ -30,11 +30,11 @@ include:
     file: "scanners/fossa-python.yml"
 
 
-#stages:
-#  - review
-#  - linters
-#  - test
-#  - deploy
+stages:
+  - review
+  - linters
+  - test
+  - deploy
 
 
 #isort:
-- 
GitLab


From dfff5121140745c1977884688aa015c11483634c Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:19:09 +0200
Subject: [PATCH 075/111] test ci

---
 .gitlab-ci.yml | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4f21ac7..158151f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,8 +1,8 @@
 include:
 
-  # PIPELINE LOGIC
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "standard-setup.yml"
+#  # PIPELINE LOGIC
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "standard-setup.yml"
 
   # linters
   - project: "osdu/platform/ci-cd-pipelines"
@@ -12,7 +12,8 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-package.yml"
 
-  - local: "devops/osdu/pipeline/override-stage.yml"
+  - local: "devops/osdu/build/python.yml"
+  #- local: "devops/osdu/pipeline/override-stage.yml"
 
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
-- 
GitLab


From 277f0f55af058438558edb77b460e8bebbe32d6b Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:19:47 +0200
Subject: [PATCH 076/111] test ci

---
 .gitlab-ci.yml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 158151f..684af60 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -31,11 +31,11 @@ include:
     file: "scanners/fossa-python.yml"
 
 
-stages:
-  - review
-  - linters
-  - test
-  - deploy
+#stages:
+#  - review
+#  - linters
+#  - test
+#  - deploy
 
 
 #isort:
-- 
GitLab


From b7099c23a4401845d7e3b3062d3268ab34ba3708 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:22:26 +0200
Subject: [PATCH 077/111] test ci

---
 .gitlab-ci.yml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 684af60..40c77e4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,9 +8,9 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
-  # deply
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python-package.yml"
+#  # deply
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-package.yml"
 
   - local: "devops/osdu/build/python.yml"
   #- local: "devops/osdu/pipeline/override-stage.yml"
@@ -31,9 +31,9 @@ include:
     file: "scanners/fossa-python.yml"
 
 
-#stages:
+stages:
 #  - review
-#  - linters
+  - linters
 #  - test
 #  - deploy
 
-- 
GitLab


From 63482179e5abf43792caebd3ed242a24907872f1 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:23:38 +0200
Subject: [PATCH 078/111] test ci

---
 .gitlab-ci.yml | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 40c77e4..3b9d618 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,8 +12,8 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
-  - local: "devops/osdu/build/python.yml"
-  #- local: "devops/osdu/pipeline/override-stage.yml"
+  #- local: "devops/osdu/build/python.yml"
+  - local: "devops/osdu/pipeline/override-stage.yml"
 
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
@@ -34,6 +34,7 @@ include:
 stages:
 #  - review
   - linters
+  - build
 #  - test
 #  - deploy
 
-- 
GitLab


From 5b95111a90408cea1c4287199351b229591750a2 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:25:56 +0200
Subject: [PATCH 079/111] test ci

---
 .gitlab-ci.yml | 30 ++++++++++++++++--------------
 1 file changed, 16 insertions(+), 14 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3b9d618..0146a10 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -18,23 +18,25 @@ include:
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
 
-  # BUILD
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "build/python.yml"
-
-  # ultimate
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "scanners/gitlab-ultimate.yml"
-
-  # fossa
-  - project: "osdu/platform/ci-cd-pipelines"
-    file: "scanners/fossa-python.yml"
-
+#  # BUILD
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python.yml"
+#
+#  # ultimate
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "scanners/gitlab-ultimate.yml"
+#
+#  # fossa
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "scanners/fossa-python.yml"
 
 stages:
-#  - review
   - linters
-  - build
+
+#stages:
+#  - review
+#  - linters
+#  - build
 #  - test
 #  - deploy
 
-- 
GitLab


From 1ab737741114599892d278437033e0125b3e255a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:27:16 +0200
Subject: [PATCH 080/111] test ci

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0146a10..de301b8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,8 +12,8 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
-  #- local: "devops/osdu/build/python.yml"
-  - local: "devops/osdu/pipeline/override-stage.yml"
+  - local: "devops/osdu/build/python.yml"
+  #- local: "devops/osdu/pipeline/override-stage.yml"
 
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
-- 
GitLab


From 3eea8b7e07280ba649d1e1c57fbb346f0aeeb06f Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:28:36 +0200
Subject: [PATCH 081/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index de301b8..25dd444 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -32,6 +32,7 @@ include:
 
 stages:
   - linters
+  - build
 
 #stages:
 #  - review
-- 
GitLab


From 04b105072608f66df2c0110c32b71eaa6abc528a Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 30 Dec 2024 16:30:38 +0200
Subject: [PATCH 082/111] test ci

---
 .gitlab-ci.yml               |  2 +-
 devops/osdu/build/python.yml | 58 ++++++++++++++++++------------------
 2 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 25dd444..76bf581 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -32,7 +32,7 @@ include:
 
 stages:
   - linters
-  - build
+#  - build
 
 #stages:
 #  - review
diff --git a/devops/osdu/build/python.yml b/devops/osdu/build/python.yml
index 8c0ae2e..51eb499 100644
--- a/devops/osdu/build/python.yml
+++ b/devops/osdu/build/python.yml
@@ -25,19 +25,19 @@ variables:
   - when: on_success
 
 
-compile-and-unit-test:
-  stage: build
-  image: python:3.11
-  extends:
-    - ".python"
-    - ".skipForTriggeringMergeRequests"
-  script:
-    - poetry install
-    - nox --session=unit-tests-3.11
-  rules:
-    - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
-      when: never
-    - when: on_success
+#compile-and-unit-test:
+#  stage: build
+#  image: python:3.11
+#  extends:
+#    - ".python"
+#    - ".skipForTriggeringMergeRequests"
+#  script:
+#    - poetry install
+#    - nox --session=unit-tests-3.11
+#  rules:
+#    - if: "$CI_COMMIT_REF_NAME !~ /^trusted-/ && $CI_MERGE_REQUEST_ID"
+#      when: never
+#    - when: on_success
 
 
 isort:
@@ -67,19 +67,19 @@ pylint:
     - poetry run ruff check .
 
 
-publish-package:
-  image: python:3.11
-  extends: .python
-  script:
-    - VERSION=$(python version.py)
-    - echo ${VERSION}
-    - |
-       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
-    - echo ${CURRENT_VERSION}
-    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
-    - poetry build
-    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
-    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
-    - poetry publish --repository gitlab | tee output.txt
-    - echo ${VERSION} > published_version.txt
-    - cat published_version.txt
+#publish-package:
+#  image: python:3.11
+#  extends: .python
+#  script:
+#    - VERSION=$(python version.py)
+#    - echo ${VERSION}
+#    - |
+#       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+#    - echo ${CURRENT_VERSION}
+#    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+#    - poetry build
+#    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+#    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+#    - poetry publish --repository gitlab | tee output.txt
+#    - echo ${VERSION} > published_version.txt
+#    - cat published_version.txt
-- 
GitLab


From 72f31e0adbb08d7332daf944d48ebe0f753661ea Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 2 Jan 2025 14:02:29 +0200
Subject: [PATCH 083/111] test ci

---
 .gitlab-ci.yml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 76bf581..4926ff3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,11 +8,12 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
+  - local: "devops/osdu/build/python.yml"
 #  # deply
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
 
-  - local: "devops/osdu/build/python.yml"
+
   #- local: "devops/osdu/pipeline/override-stage.yml"
 
   #- local: "devops/osdu/pipeline/override-stage.yml"
@@ -30,8 +31,8 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
-stages:
-  - linters
+#stages:
+#  - linters
 #  - build
 
 #stages:
-- 
GitLab


From 74c8598934b2419ff34545cd8ba00d4154ac9b6d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 2 Jan 2025 14:24:37 +0200
Subject: [PATCH 084/111] test ci

---
 .gitlab-ci.yml                       |  3 +-
 devops/osdu/build/python-linters.yml | 52 ++++++++++++++++++++++++++++
 2 files changed, 54 insertions(+), 1 deletion(-)
 create mode 100644 devops/osdu/build/python-linters.yml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4926ff3..80788fc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,7 +8,8 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
 
-  - local: "devops/osdu/build/python.yml"
+  #- local: "devops/osdu/build/python.yml"
+  - local: "devops/osdu/build/python-linters.yml"
 #  # deply
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "build/python-package.yml"
diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
new file mode 100644
index 0000000..999237d
--- /dev/null
+++ b/devops/osdu/build/python-linters.yml
@@ -0,0 +1,52 @@
+variables:
+  SRC_FILES: "src/"
+  REQUIREMENTS_FILE: "pyproject.toml"
+
+isort:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+
+python-static-analysis:
+  needs: [ ]
+  extends: [ ]
+  script:
+    echo 'empty'
+  only:
+    - $DISABLED == 'true'
+
+ruff:
+  tags: ["osdu-small"]
+  image: python:3.11-slim
+  stage: linters
+  allow_failure: false
+  script:
+    - pip install poetry
+    - poetry install --no-dev
+    - poetry run ruff check .
+
+pylint:
+  tags: ["osdu-small"]
+  image: python:3.11-slim
+  stage: linters
+  allow_failure: true
+  script:
+    - pip install poetry
+    - poetry install --no-dev
+    - poetry add --dev pylint pylint-exit
+    - poetry run pylint ${SRC_FILES}
+
+mypy:
+  tags: ["osdu-small"]
+  image: python:3.11-slim
+  stage: linters
+  allow_failure: true
+  script:
+    - pip install poetry
+    - poetry install --no-dev
+    - poetry add --dev mypy
+    - poetry run mypy ${SRC_FILES}
\ No newline at end of file
-- 
GitLab


From ea75805404bbade6506e7090cafca8c589fc9f0d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 2 Jan 2025 14:27:45 +0200
Subject: [PATCH 085/111] test ci

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 80788fc..804efe4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -32,8 +32,8 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
-#stages:
-#  - linters
+stages:
+  - linters
 #  - build
 
 #stages:
-- 
GitLab


From fc186a3e7403b4e3858b0f3d3778b0f3f99d0a47 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Thu, 2 Jan 2025 14:37:47 +0200
Subject: [PATCH 086/111] test ci

---
 devops/osdu/build/python-linters.yml | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index 999237d..647e8a5 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -2,6 +2,19 @@ variables:
   SRC_FILES: "src/"
   REQUIREMENTS_FILE: "pyproject.toml"
 
+.python:
+  image: python:3.11
+  tags:
+  - osdu-small
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+
 isort:
   needs: [ ]
   extends: [ ]
@@ -22,20 +35,20 @@ python-static-analysis:
 ruff:
   tags: ["osdu-small"]
   image: python:3.11-slim
+  extends: .python
   stage: linters
   allow_failure: false
   script:
-    - pip install poetry
     - poetry install --no-dev
     - poetry run ruff check .
 
 pylint:
   tags: ["osdu-small"]
   image: python:3.11-slim
+  extends: .python
   stage: linters
   allow_failure: true
   script:
-    - pip install poetry
     - poetry install --no-dev
     - poetry add --dev pylint pylint-exit
     - poetry run pylint ${SRC_FILES}
@@ -43,10 +56,10 @@ pylint:
 mypy:
   tags: ["osdu-small"]
   image: python:3.11-slim
+  extends: .python
   stage: linters
   allow_failure: true
   script:
-    - pip install poetry
     - poetry install --no-dev
     - poetry add --dev mypy
     - poetry run mypy ${SRC_FILES}
\ No newline at end of file
-- 
GitLab


From 4ecf09018f7b93770b786d2f8d333faed1b2f2ac Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 01:00:18 +0200
Subject: [PATCH 087/111] test ci

---
 .gitlab-ci.yml                       | 59 ++++++++++++++++++++++---
 devops/osdu/build/python-linters.yml | 66 +++++++++++++++++++++-------
 2 files changed, 104 insertions(+), 21 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 804efe4..a65852f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,8 +1,8 @@
 include:
 
-#  # PIPELINE LOGIC
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "standard-setup.yml"
+  # PIPELINE LOGIC
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "standard-setup.yml"
 
   # linters
   - project: "osdu/platform/ci-cd-pipelines"
@@ -32,9 +32,48 @@ include:
 #  - project: "osdu/platform/ci-cd-pipelines"
 #    file: "scanners/fossa-python.yml"
 
+#stages:
+#  - linting
+#  - testing
+#  - review
+#  - build
+#  - coverage
+#  - containerize
+#  - scan
+#  - deploy
+#  - bootstrap
+#  - integration
+#  - acceptance
+#  - performance-testing
+#  - publish
+#  - deploy_preship
+
 stages:
   - linters
-#  - build
+  - testing
+  - review
+  #- build
+  #- coverage
+  #- containerize
+  #- scan
+
+unit-tests:
+  tags: ["osdu-medium"]
+  stage: testing
+  script:
+    - coverage run -m pytest ./tests/unit --junitxml=report.xml
+    - coverage report
+    - coverage xml
+  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
+  artifacts:
+    when: always
+    reports:
+      junit: report.xml
+      coverage_report:
+        coverage_format: cobertura
+        path: coverage.xml
+
+
 
 #stages:
 #  - review
@@ -102,4 +141,14 @@ stages:
 #    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
 #    - poetry publish --repository gitlab | tee output.txt
 #    - echo ${VERSION} > published_version.txt
-#    - cat published_version.txt
\ No newline at end of file
+#    - cat published_version.txt
+
+trigger-trusted-tests:
+  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+  before_script:
+    - echo "overriding before script"
+
+trusted-merge-branch-verification:
+  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+  before_script:
+    - echo "overriding before script"
diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index 647e8a5..8f9fd30 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -15,13 +15,13 @@ variables:
     - pip install poetry
     - poetry --version
 
-isort:
-  needs: [ ]
-  extends: [ ]
-  script:
-    echo 'empty'
-  only:
-    - $DISABLED == 'true'
+#isort:
+#  needs: [ ]
+#  extends: [ ]
+#  script:
+#    echo 'empty'
+#  only:
+#    - $DISABLED == 'true'
 
 
 python-static-analysis:
@@ -42,7 +42,18 @@ ruff:
     - poetry install --no-dev
     - poetry run ruff check .
 
-pylint:
+#pylint:
+#  tags: ["osdu-small"]
+#  image: python:3.11-slim
+#  extends: .python
+#  stage: linters
+#  allow_failure: true
+#  script:
+#    - poetry install --no-dev
+#    - poetry add --dev pylint pylint-exit
+#    - poetry run pylint ${SRC_FILES}
+
+mypy:
   tags: ["osdu-small"]
   image: python:3.11-slim
   extends: .python
@@ -50,16 +61,39 @@ pylint:
   allow_failure: true
   script:
     - poetry install --no-dev
-    - poetry add --dev pylint pylint-exit
-    - poetry run pylint ${SRC_FILES}
+    - poetry add --dev mypy
+    - poetry run mypy ${SRC_FILES}
 
-mypy:
+
+pylint:
   tags: ["osdu-small"]
-  image: python:3.11-slim
-  extends: .python
+  image:
+    name: apache/airflow:2.1.2-python3.8
+    entrypoint: [""]
   stage: linters
   allow_failure: true
   script:
-    - poetry install --no-dev
-    - poetry add --dev mypy
-    - poetry run mypy ${SRC_FILES}
\ No newline at end of file
+    - pip install setuptools pylint==2.17.6 pylint_quotes==0.2.3 pylint-exit==1.2.0
+    - sh -c 'pylint --rcfile=.pylintrc ${SRC_FILES}' || EXIT_CODE=$?
+    - exit ${EXIT_CODE}
+
+isort:
+  tags: ["osdu-small"]
+  image: python:3.8.12-slim
+  allow_failure: true
+  stage: linters
+  script:
+    - python -m pip install setuptools isort
+    - isort -c -v ${SRC_FILES} || EXIT_CODE=$?
+    - exit ${EXIT_CODE}
+
+#python-static-analysis:
+#  tags: ["osdu-small"]
+#  image: community.opengroup.org:5555/osdu/platform/deployment-and-operations/pytype-container/pytype:latest
+#  stage: linters
+#  variables:
+#    EXCLUDE: ""
+#  script:
+#    - pip install -r $REQUIREMENTS_FILE
+#    - pytype $SRC_FILES -k -j auto --exclude $EXCLUDE
+
-- 
GitLab


From b91ca59f7fc1d9184a53e49688c42b82e793c2fa Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 12:10:10 +0200
Subject: [PATCH 088/111] test ci

---
 .gitlab-ci.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a65852f..ba5eeac 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -143,12 +143,12 @@ unit-tests:
 #    - echo ${VERSION} > published_version.txt
 #    - cat published_version.txt
 
-trigger-trusted-tests:
-  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-  before_script:
-    - echo "overriding before script"
-
-trusted-merge-branch-verification:
-  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-  before_script:
-    - echo "overriding before script"
+#trigger-trusted-tests:
+#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+#  before_script:
+#    - echo "overriding before script"
+#
+#trusted-merge-branch-verification:
+#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+#  before_script:
+#    - echo "overriding before script"
-- 
GitLab


From adafd68447fe4d0e0faca9d3efe769198a570ac3 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 13:59:21 +0200
Subject: [PATCH 089/111] test ci

---
 devops/osdu/build/python-linters.yml | 40 ++++++++++++++--------------
 1 file changed, 20 insertions(+), 20 deletions(-)

diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index 8f9fd30..e863899 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -65,27 +65,27 @@ mypy:
     - poetry run mypy ${SRC_FILES}
 
 
-pylint:
-  tags: ["osdu-small"]
-  image:
-    name: apache/airflow:2.1.2-python3.8
-    entrypoint: [""]
-  stage: linters
-  allow_failure: true
-  script:
-    - pip install setuptools pylint==2.17.6 pylint_quotes==0.2.3 pylint-exit==1.2.0
-    - sh -c 'pylint --rcfile=.pylintrc ${SRC_FILES}' || EXIT_CODE=$?
-    - exit ${EXIT_CODE}
+#pylint:
+#  tags: ["osdu-small"]
+#  image:
+#    name: apache/airflow:2.1.2-python3.8
+#    entrypoint: [""]
+#  stage: linters
+#  allow_failure: true
+#  script:
+#    - pip install setuptools pylint==2.17.6 pylint_quotes==0.2.3 pylint-exit==1.2.0
+#    - sh -c 'pylint --rcfile=.pylintrc ${SRC_FILES}' || EXIT_CODE=$?
+#    - exit ${EXIT_CODE}
 
-isort:
-  tags: ["osdu-small"]
-  image: python:3.8.12-slim
-  allow_failure: true
-  stage: linters
-  script:
-    - python -m pip install setuptools isort
-    - isort -c -v ${SRC_FILES} || EXIT_CODE=$?
-    - exit ${EXIT_CODE}
+#isort:
+#  tags: ["osdu-small"]
+#  image: python:3.8.12-slim
+#  allow_failure: true
+#  stage: linters
+#  script:
+#    - python -m pip install setuptools isort
+#    - isort -c -v ${SRC_FILES} || EXIT_CODE=$?
+#    - exit ${EXIT_CODE}
 
 #python-static-analysis:
 #  tags: ["osdu-small"]
-- 
GitLab


From 204a7934e5e1c00b1b2748f446ce45256ed46c0d Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:00:36 +0200
Subject: [PATCH 090/111] test ci

---
 devops/osdu/build/python-linters.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index e863899..db3b3e9 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -39,7 +39,7 @@ ruff:
   stage: linters
   allow_failure: false
   script:
-    - poetry install --no-dev
+    - poetry install --group dev
     - poetry run ruff check .
 
 #pylint:
-- 
GitLab


From 47b3da9e9262aa7e03664c57368fa0788434a987 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:03:55 +0200
Subject: [PATCH 091/111] test ci

---
 devops/osdu/build/python-linters.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index db3b3e9..a6232f8 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -39,7 +39,7 @@ ruff:
   stage: linters
   allow_failure: false
   script:
-    - poetry install --group dev
+    - poetry install --dev
     - poetry run ruff check .
 
 #pylint:
-- 
GitLab


From edbff46532def397560b949d724712eb7728d21f Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:08:32 +0200
Subject: [PATCH 092/111] test ci

---
 devops/osdu/build/python-linters.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/devops/osdu/build/python-linters.yml b/devops/osdu/build/python-linters.yml
index a6232f8..b95a2aa 100644
--- a/devops/osdu/build/python-linters.yml
+++ b/devops/osdu/build/python-linters.yml
@@ -39,7 +39,7 @@ ruff:
   stage: linters
   allow_failure: false
   script:
-    - poetry install --dev
+    - poetry install
     - poetry run ruff check .
 
 #pylint:
-- 
GitLab


From afc24d2e859ccbe06fa88c9739264432451e3ea9 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:20:52 +0200
Subject: [PATCH 093/111] test ci

---
 .gitlab-ci.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ba5eeac..6c22d6a 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -60,6 +60,9 @@ stages:
 unit-tests:
   tags: ["osdu-medium"]
   stage: testing
+  before_script:
+    - apt-get install -y jq curl
+    - pip install coverage
   script:
     - coverage run -m pytest ./tests/unit --junitxml=report.xml
     - coverage report
-- 
GitLab


From 2be7cec121726008f939d7c810d040692062b0fa Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:26:55 +0200
Subject: [PATCH 094/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6c22d6a..32bd151 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -59,6 +59,7 @@ stages:
 
 unit-tests:
   tags: ["osdu-medium"]
+  image: python:3.11
   stage: testing
   before_script:
     - apt-get install -y jq curl
-- 
GitLab


From a995e7ccaf012588e7791fb5161e79a4db8fb80e Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:31:29 +0200
Subject: [PATCH 095/111] test ci

---
 .gitlab-ci.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 32bd151..0b3cc3b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -61,10 +61,9 @@ unit-tests:
   tags: ["osdu-medium"]
   image: python:3.11
   stage: testing
-  before_script:
+  script:
     - apt-get install -y jq curl
     - pip install coverage
-  script:
     - coverage run -m pytest ./tests/unit --junitxml=report.xml
     - coverage report
     - coverage xml
-- 
GitLab


From 1005336f87db58880c22402c209a4f3623a39e7b Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:34:13 +0200
Subject: [PATCH 096/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0b3cc3b..e845899 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -62,6 +62,7 @@ unit-tests:
   image: python:3.11
   stage: testing
   script:
+    - apt-get update
     - apt-get install -y jq curl
     - pip install coverage
     - coverage run -m pytest ./tests/unit --junitxml=report.xml
-- 
GitLab


From 37ce0f3eb1b262dfa8e3f329ab68066ea021c35e Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 14:40:49 +0200
Subject: [PATCH 097/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e845899..858fffb 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -65,6 +65,7 @@ unit-tests:
     - apt-get update
     - apt-get install -y jq curl
     - pip install coverage
+    - pip install pytest
     - coverage run -m pytest ./tests/unit --junitxml=report.xml
     - coverage report
     - coverage xml
-- 
GitLab


From 2b26fa1a0d3316b2b20d3db13b1d6262bbbf5553 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:16:22 +0200
Subject: [PATCH 098/111] test ci

---
 .gitlab-ci.yml | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 858fffb..ba9a0cc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -60,15 +60,13 @@ stages:
 unit-tests:
   tags: ["osdu-medium"]
   image: python:3.11
+  extends: .python
   stage: testing
   script:
-    - apt-get update
-    - apt-get install -y jq curl
-    - pip install coverage
-    - pip install pytest
-    - coverage run -m pytest ./tests/unit --junitxml=report.xml
-    - coverage report
-    - coverage xml
+    - poetry install
+    - poetry run coverage run -m pytest ./tests/unit --junitxml=report.xml
+    - poetry run coverage report
+    - poetry run coverage xml
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
     when: always
-- 
GitLab


From 048306b1fd0d3d6c2b02baf2d3886da18944ffda Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:24:21 +0200
Subject: [PATCH 099/111] test ci

---
 .gitlab-ci.yml | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ba9a0cc..1706635 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -64,17 +64,19 @@ unit-tests:
   stage: testing
   script:
     - poetry install
-    - poetry run coverage run -m pytest ./tests/unit --junitxml=report.xml
+    - poetry run coverage run -m pytest ./tests/unit --junitxml=$CI_PROJECT_DIR/report.xml
     - poetry run coverage report
     - poetry run coverage xml
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
     when: always
+    paths:
+      - $CI_PROJECT_DIR/coverage.xml
     reports:
-      junit: report.xml
+      junit: $CI_PROJECT_DIR/report.xml
       coverage_report:
         coverage_format: cobertura
-        path: coverage.xml
+        path: $CI_PROJECT_DIR/coverage.xml
 
 
 
-- 
GitLab


From d44bea4d5c363989cf263651bd2f0f9e5eedd265 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:29:49 +0200
Subject: [PATCH 100/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1706635..1e0df93 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -66,7 +66,7 @@ unit-tests:
     - poetry install
     - poetry run coverage run -m pytest ./tests/unit --junitxml=$CI_PROJECT_DIR/report.xml
     - poetry run coverage report
-    - poetry run coverage xml
+    - poetry run coverage xml -o $CI_PROJECT_DIR/coverage.xml
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
     when: always
-- 
GitLab


From a6e04045c0b6a669b06274be8c41d9e892d23de2 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:37:35 +0200
Subject: [PATCH 101/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1e0df93..2571bef 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -65,7 +65,7 @@ unit-tests:
   script:
     - poetry install
     - poetry run coverage run -m pytest ./tests/unit --junitxml=$CI_PROJECT_DIR/report.xml
-    - poetry run coverage report
+    - poetry run coverage report --fail-under=55
     - poetry run coverage xml -o $CI_PROJECT_DIR/coverage.xml
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
-- 
GitLab


From 36ad5969e081c033e5c5575b8d0dd27409a680d5 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:47:58 +0200
Subject: [PATCH 102/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2571bef..eca9ca6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -66,7 +66,7 @@ unit-tests:
     - poetry install
     - poetry run coverage run -m pytest ./tests/unit --junitxml=$CI_PROJECT_DIR/report.xml
     - poetry run coverage report --fail-under=55
-    - poetry run coverage xml -o $CI_PROJECT_DIR/coverage.xml
+    - poetry run coverage xml -o $CI_PROJECT_DIR/coverage.xml --fail-under=55
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
     when: always
-- 
GitLab


From 430861fff7dcd4fcf66e663fe36bebd391571627 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:58:14 +0200
Subject: [PATCH 103/111] test ci

---
 .gitlab-ci.yml | 29 ++++++++++-------------------
 1 file changed, 10 insertions(+), 19 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index eca9ca6..3e67a93 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,30 +7,21 @@ include:
   # linters
   - project: "osdu/platform/ci-cd-pipelines"
     file: "build/python-linters.yml"
-
-  #- local: "devops/osdu/build/python.yml"
   - local: "devops/osdu/build/python-linters.yml"
-#  # deply
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-package.yml"
 
+  # ultimate
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/gitlab-ultimate.yml"
 
-  #- local: "devops/osdu/pipeline/override-stage.yml"
+  # fossa
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "scanners/fossa-python.yml"
 
-  #- local: "devops/osdu/pipeline/override-stage.yml"
+#  # deply
+#  - project: "osdu/platform/ci-cd-pipelines"
+#    file: "build/python-package.yml"
 
 
-#  # BUILD
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python.yml"
-#
-#  # ultimate
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "scanners/gitlab-ultimate.yml"
-#
-#  # fossa
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "scanners/fossa-python.yml"
 
 #stages:
 #  - linting
@@ -55,7 +46,7 @@ stages:
   #- build
   #- coverage
   #- containerize
-  #- scan
+  - scan
 
 unit-tests:
   tags: ["osdu-medium"]
-- 
GitLab


From cfa81ccee1399b20ed30c35d9029ce139548f746 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 15:59:18 +0200
Subject: [PATCH 104/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3e67a93..d7755e5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -48,7 +48,7 @@ stages:
   #- containerize
   - scan
 
-unit-tests:
+compile-and-unit-test:
   tags: ["osdu-medium"]
   image: python:3.11
   extends: .python
-- 
GitLab


From 22c7d4544b7aa3ac26bdf652e562d582ef143177 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 16:07:33 +0200
Subject: [PATCH 105/111] test ci

---
 .gitlab-ci.yml | 55 +++++++++++++++++++++++++-------------------------
 1 file changed, 28 insertions(+), 27 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d7755e5..63b6bae 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -17,9 +17,9 @@ include:
   - project: "osdu/platform/ci-cd-pipelines"
     file: "scanners/fossa-python.yml"
 
-#  # deply
-#  - project: "osdu/platform/ci-cd-pipelines"
-#    file: "build/python-package.yml"
+  # deploy
+  - project: "osdu/platform/ci-cd-pipelines"
+    file: "build/python-package.yml"
 
 
 
@@ -47,6 +47,7 @@ stages:
   #- coverage
   #- containerize
   - scan
+  - deply
 
 compile-and-unit-test:
   tags: ["osdu-medium"]
@@ -114,30 +115,30 @@ compile-and-unit-test:
 #    - poetry run ruff check .
 #
 #
-#publish-package:
-#  tags: ['osdu-small']
-#  image: python:3.11
-#  before_script:
-#    - apt-get update
-#    - apt-get install -y jq curl
-#    - pip install --upgrade pip
-#    - pip install nox
-#    - pip install nox-poetry
-#    - pip install poetry
-#    - poetry --version
-#  script:
-#    - VERSION=$(python version.py)
-#    - echo ${VERSION}
-#    - |
-#       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
-#    - echo ${CURRENT_VERSION}
-#    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
-#    - poetry build
-#    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
-#    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
-#    - poetry publish --repository gitlab | tee output.txt
-#    - echo ${VERSION} > published_version.txt
-#    - cat published_version.txt
+publish-package:
+  tags: ['osdu-small']
+  image: python:3.11
+  before_script:
+    - apt-get update
+    - apt-get install -y jq curl
+    - pip install --upgrade pip
+    - pip install nox
+    - pip install nox-poetry
+    - pip install poetry
+    - poetry --version
+  script:
+    - VERSION=$(python version.py)
+    - echo ${VERSION}
+    - |
+       CURRENT_VERSION=$(cat pyproject.toml | grep -n version | head -1 | sed -En 's/^3:version = //p' | cut -d "\"" -f 2)
+    - echo ${CURRENT_VERSION}
+    - sed -i "s/${CURRENT_VERSION}/${VERSION}/g" pyproject.toml
+    - poetry build
+    - poetry config repositories.gitlab "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi"
+    - poetry config http-basic.gitlab gitlab-ci-token "$CI_JOB_TOKEN"
+    - poetry publish --repository gitlab | tee output.txt
+    - echo ${VERSION} > published_version.txt
+    - cat published_version.txt
 
 #trigger-trusted-tests:
 #  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-- 
GitLab


From 2ac1405fb98c4ec6bf1ee565ffd05e60e14be9e4 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 16:08:47 +0200
Subject: [PATCH 106/111] test ci

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 63b6bae..ed10ebd 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -118,6 +118,7 @@ compile-and-unit-test:
 publish-package:
   tags: ['osdu-small']
   image: python:3.11
+  stage: deploy
   before_script:
     - apt-get update
     - apt-get install -y jq curl
-- 
GitLab


From d6196adb69af093208523d9daec95219381931f5 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 16:09:24 +0200
Subject: [PATCH 107/111] test ci

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ed10ebd..d2374bf 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -47,7 +47,7 @@ stages:
   #- coverage
   #- containerize
   - scan
-  - deply
+  - deploy
 
 compile-and-unit-test:
   tags: ["osdu-medium"]
-- 
GitLab


From 302ecb910749c9bb0d3bd1aecf4565ea199f5a70 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 16:20:23 +0200
Subject: [PATCH 108/111] test ci

---
 .gitlab-ci.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d2374bf..101c0d2 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -141,12 +141,12 @@ publish-package:
     - echo ${VERSION} > published_version.txt
     - cat published_version.txt
 
-#trigger-trusted-tests:
-#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-#  before_script:
-#    - echo "overriding before script"
-#
-#trusted-merge-branch-verification:
-#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-#  before_script:
-#    - echo "overriding before script"
+trigger-trusted-tests:
+  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+  before_script:
+    - echo "overriding before script"
+
+trusted-merge-branch-verification:
+  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+  before_script:
+    - echo "overriding before script"
-- 
GitLab


From 7a2c60b6e49af780e9096bcfb90d502a1b774c2b Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 23:25:55 +0200
Subject: [PATCH 109/111] test ci

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 101c0d2..97b725a 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -118,6 +118,8 @@ compile-and-unit-test:
 publish-package:
   tags: ['osdu-small']
   image: python:3.11
+  extends:
+    - .skipForTriggeringMergeRequests
   stage: deploy
   before_script:
     - apt-get update
-- 
GitLab


From 623598942452aace6204b805f178297517300f9b Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Fri, 3 Jan 2025 23:49:39 +0200
Subject: [PATCH 110/111] test ci

---
 .gitlab-ci.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 97b725a..1db0133 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -143,12 +143,12 @@ publish-package:
     - echo ${VERSION} > published_version.txt
     - cat published_version.txt
 
-trigger-trusted-tests:
-  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-  before_script:
-    - echo "overriding before script"
-
-trusted-merge-branch-verification:
-  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
-  before_script:
-    - echo "overriding before script"
+#trigger-trusted-tests:
+#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+#  before_script:
+#    - echo "overriding before script"
+#
+#trusted-merge-branch-verification:
+#  image: $CI_REGISTRY/osdu/platform/trusted-mr-container
+#  before_script:
+#    - echo "overriding before script"
-- 
GitLab


From 254e6a89ba401b412e58503539916f8a1ddacef1 Mon Sep 17 00:00:00 2001
From: Ihor Anikeiev <ihor_anikeiev@epam.com>
Date: Mon, 6 Jan 2025 15:32:53 +0200
Subject: [PATCH 111/111] test ci

---
 .fossa.yml | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100644 .fossa.yml

diff --git a/.fossa.yml b/.fossa.yml
new file mode 100644
index 0000000..795dced
--- /dev/null
+++ b/.fossa.yml
@@ -0,0 +1,19 @@
+# Generated by FOSSA CLI (https://github.com/fossas/fossa-cli)
+# Visit https://fossa.com to learn more
+
+version: 3
+cli:
+  server: https://app.fossa.com
+  fetcher: custom
+  project: sdfs-test
+analyze:
+  modules:
+  - name: .
+    type: pip
+    target: .
+    path: .
+  tools:
+    python:
+      manager: poetry
+      install_command: poetry install --no-root --with dev
+      build_command: poetry build
-- 
GitLab