[
  {
    "path": ".github/workflows/deploying.yml",
    "content": "name: S3Path Deplyer\n\non:  # workflow_dispatch\n  release:\n    types: [published]\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v2\n    - name: Set up Python\n      uses: actions/setup-python@v2\n      with:\n        python-version: '3.9'\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install build\n    - name: Build package\n      run: python -m build\n    - name: Publish package\n      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29\n      with:\n        user: __token__\n        password: ${{ secrets.PYPI_API_TOKEN }}\n\n"
  },
  {
    "path": ".github/workflows/testing.yml",
    "content": "name: S3Path Tester\n\non:\n  push:\n    branches: [ master ]\n  pull_request:\n    branches: [ master ]\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n    strategy:\n      matrix:\n        python-version: [3.9, \"3.10\", 3.11, 3.12, 3.13]\n    steps:\n      - uses: actions/checkout@v2\n\n      - name: Set up Python ${{ matrix.python-version }}\n        uses: actions/setup-python@v2\n        with:\n          python-version: ${{ matrix.python-version }}\n\n      - name: Display Python version\n        run: python -c \"import sys; print(sys.version)\"\n\n      - name: Install dependencies\n        env:\n          PIPENV_DEFAULT_PYTHON_VERSION: ${{ matrix.python-version }}\n        run: make init\n\n      - name: Run Tests\n        run: make tests\n"
  },
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n.hypothesis/\n.pytest_cache/\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# pyenv\n.python-version\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mypy\n.mypy_cache/\n\n# PyCharm\n.idea/\n\n# Pipfile\nPipfile.lock"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2019 Lior Mizrahi\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include s3path.py\ninclude setup.py\ninclude README.rst\ninclude LICENSE\n"
  },
  {
    "path": "Makefile",
    "content": ".PHONY: docs tests\ninit:\n\tpython -m pip install --upgrade pip\n\tpython -m pip install --upgrade pipenv\n\tpipenv install --skip-lock\n\tpipenv run pip freeze\n\ndeveloper:\n\tpipenv install --dev --skip-lock\n\ntests:\n\tpipenv run pytest\n\npublish:\n\tpipenv run python setup.py sdist bdist_wheel\n\tpipenv run twine upload dist/*\n\trm -fr build dist .egg s3path.egg-info\n\ncheck: tests\n"
  },
  {
    "path": "Pipfile",
    "content": "[[source]]\nurl = \"https://pypi.org/simple\"\nverify_ssl = true\nname = \"pypi\"\npython_version = \"3.13\"\n\n[packages]\nmoto = \"*\"\npytest = \"*\"\nsphinx = \"*\"\ntwine = \"*\"\npytest-cov = \"*\"\nsmart-open = \"*\"\npackaging = \"*\"\nmypy = \"*\"\n\n[dev-packages]\nipython = \"*\"\nipdb = \"*\"\ns3path = {editable = true, path = \".\"}\n"
  },
  {
    "path": "README.rst",
    "content": "S3Path\n======\n\n.. image:: https://badgen.net/pypi/v/s3path\n    :target: https://pypi.org/project/s3path/\n    :alt: Latest version\n\n.. image:: https://github.com/liormizr/s3path/actions/workflows/testing.yml/badge.svg?branch=master&event=push\n    :target: https://github.com/liormizr/s3path/actions/workflows/testing.yml\n    :alt: S3Path CI\n\nS3Path provide a Python convenient File-System/Path like interface for AWS S3 Service using boto3 S3 resource as a driver.\n\nLike pathlib, but for S3 Buckets\n________________________________\n\nAWS S3 is among the most popular cloud storage solutions. It's object storage, is built to store and retrieve various amounts of data from anywhere.\n\nCurrently, Python developers use Boto3 as the default API to connect / put / get / list / delete files from S3.\n\nS3Path blends Boto3's ease of use and the familiarity of pathlib api.\n\nInstall:\n========\n\nFrom PyPI:\n\n.. code:: bash\n\n    $ pip install s3path\n\nFrom Conda:\n\n.. code:: bash\n\n    $ conda install -c conda-forge s3path\n\nBasic use:\n==========\n\nThe following example assumes an s3 bucket setup as specified bellow:\n\n.. code:: bash\n\n    $ aws s3 ls s3://pypi-proxy/\n\n    2018-04-24 22:59:59        186 requests/index.html\n    2018-04-24 22:59:57     485015 requests/requests-2.9.1.tar.gz\n    2018-04-24 22:35:01      89112 boto3/boto3-1.4.1.tar.gz\n    2018-04-24 22:35:02        180 boto3/index.html\n    2018-04-24 22:35:19    3308919 botocore/botocore-1.4.93.tar.gz\n    2018-04-24 22:35:36        188 botocore/index.html\n\nImporting the main class:\n\n.. code:: python\n\n   >>> from s3path import S3Path\n\nListing \"subdirectories\" - s3 keys can be split like file-system with a `/` in s3path we:\n\n.. 
code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> [path for path in bucket_path.iterdir() if path.is_dir()]\n   [S3Path('/pypi-proxy/requests/'),\n    S3Path('/pypi-proxy/boto3/'),\n    S3Path('/pypi-proxy/botocore/')]\n\nListing html source files in this \"directory\" tree:\n\n.. code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> list(bucket_path.glob('**/*.html'))\n   [S3Path('/pypi-proxy/requests/index.html'),\n    S3Path('/pypi-proxy/boto3/index.html'),\n    S3Path('/pypi-proxy/botocore/index.html')]\n\nNavigating inside a \"directory\" tree:\n\n.. code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> boto3_package_path = bucket_path / 'boto3' / 'boto3-1.4.1.tar.gz'\n   >>> boto3_package_path\n   S3Path('/pypi-proxy/boto3/boto3-1.4.1.tar.gz')\n\nQuerying path properties:\n\n.. code:: python\n\n   >>> boto3_package_path = S3Path('/pypi-proxy/boto3/boto3-1.4.1.tar.gz')\n   >>> boto3_package_path.exists()\n   True\n   >>> boto3_package_path.is_dir()\n   False\n   >>> boto3_package_path.is_file()\n   True\n\nOpening a \"file\" (s3 key):\n\n.. code:: python\n\n   >>> botocore_index_path = S3Path('/pypi-proxy/botocore/index.html')\n   >>> with botocore_index_path.open() as f:\n   >>>     print(f.read())\n   \"\"\"\n   <!DOCTYPE html>\n   <html>\n   <head>\n       <meta charset=\"UTF-8\">\n       <title>Package Index</title>\n   </head>\n   <body>\n       <a href=\"botocore-1.4.93.tar.gz\">botocore-1.4.93.tar.gz</a><br>\n   </body>\n   </html>\n   \"\"\"\n\n\nOr Simply reading:\n\n.. 
code:: python\n\n   >>> botocore_index_path = S3Path('/pypi-proxy/botocore/index.html')\n   >>> botocore_index_path.read_text()\n   \"\"\"\n   <!DOCTYPE html>\n   <html>\n   <head>\n       <meta charset=\"UTF-8\">\n       <title>Package Index</title>\n   </head>\n   <body>\n       <a href=\"botocore-1.4.93.tar.gz\">botocore-1.4.93.tar.gz</a><br>\n   </body>\n   </html>\n   \"\"\"\n\nVersioned S3 Objects:\n=====================\n\ns3path supports versioned objects for S3 buckets that have versioning enabled. ``VersionedS3Path`` is a subclass of ``S3Path`` that supports all of its features. The main difference is an additional required ``version_id`` keyword parameter in each of its constructor methods.\n\n.. code:: python\n\n   >>> from s3path import VersionedS3Path\n   >>> bucket, key, version_id = 'my-bucket', 'my-key', 'my-version-id'\n   >>> VersionedS3Path(f'/{bucket}/{key}', version_id=version_id)\n   VersionedS3Path('/my-bucket/my-key', version_id='my-version-id')\n   >>> VersionedS3Path.from_uri(f's3://{bucket}/{key}', version_id=version_id)\n   VersionedS3Path('/my-bucket/my-key', version_id='my-version-id')\n   >>> VersionedS3Path.from_bucket_key(bucket=bucket, key=key, version_id=version_id)\n   VersionedS3Path('/my-bucket/my-key', version_id='my-version-id')\n\nNew in version 0.5.0\n\nRequirements:\n=============\n\n* Python >= 3.4\n* boto3\n* smart-open\n\nFurther Documentation:\n======================\n\n* `Advanced S3Path configuration`_ (S3 parameters, S3-compatible storage, etc.)\n* `Abstract pathlib interface`_ implemented by S3Path\n* `Boto3 vs S3Path usage examples`_\n\n\n.. _Abstract pathlib interface: https://github.com/liormizr/s3path/blob/master/docs/interface.rst\n.. _Boto3 vs S3Path usage examples: https://github.com/liormizr/s3path/blob/master/docs/comparison.rst\n.. _Advanced S3Path configuration: https://github.com/liormizr/s3path/blob/master/docs/advance.rst\n"
  },
  {
    "path": "docs/advance.rst",
    "content": "Advance features (configurations/s3 parameters):\n================================================\n\nBasically s3path is trying to be as pure as possible from any non `pathlib`_ features.\n\nThe goal is to take the AWS S3 service and integrate it into `pathlib`_'s interface without changes.\n\nOnly then s3path provides a Python-convenient File-System/Path like interface for AWS's S3 service using `boto3`_ S3 resource as a driver.\n\n\nConfigurations:\n---------------\n\ns3path uses `boto3`_ as the SDK for AWS S3 service.\n\nTo use `boto3`_ you first need to configure it. For the full documentation see `configuration`_.\n\n`boto3`_ has multiple ways to input configurations, s3path only supportes the following:\n\n1. Environment variables\n#. Shared credential file (~/.aws/credentials)\n#. AWS config file (~/.aws/config)\n#. Assume Role provider\n#. Instance metadata service on an Amazon EC2 instance that has an IAM role configured.\n\nWith s3path, you can't specify configurations. The only way to specify configurations in code, is with `setup_default_session`_.\n\nFor Example:\n\n.. code:: python\n\n   >>> import boto3\n   >>> from s3path import S3Path\n   >>> boto3.setup_default_session(\n   ...     region_name='us-east-1',\n   ...     aws_access_key_id='<access-key>',\n   ...     aws_secret_access_key='<access-secret>')\n   >>>\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> [path for path in bucket_path.iterdir() if path.is_dir()]\n   ... [S3Path('/pypi-proxy/requests/'),\n   ...  S3Path('/pypi-proxy/boto3/'),\n   ...  S3Path('/pypi-proxy/botocore/')]\n\nParameters:\n-----------\n\nWe can map any kind of parameters that `boto3`_ `s3-resource`_ methods supports per path.\n\nFor Example:\n\nIf you want to add Server-side encryption to your Bucket, you may do it per path like this:\n\n.. 
code:: python\n\n   >>> from s3path import S3Path, register_configuration_parameter\n   >>> bucket = S3Path('/my-bucket/')\n   >>> register_configuration_parameter(bucket, parameters={'ServerSideEncryption': 'AES256'})\n\nThis will work for every s3path.\n\nS3Path('/') - parameters that will be used as default\n\nS3Path('/bucket/') - parameters that will be used per bucket\n\nS3Path('/bucket/key-prefix-directory/') - parameters that will be used per bucket, key prefix\n\n**NOTE:** We recommend configuring everything only in one place and not in the code.\n\n\nS3 Compatible Storage:\n----------------------\n\nThere are some cases that we want to use s3path for S3-Compatible Storage.\n\nSome examples for S3-Compatible Storage can be:\n\n* `LocalStack`_ - A fully functional local AWS cloud stack\n* `MinIO`_ - MinIO is a High Performance Object Storage released under Apache License v2.0\n\n`boto3`_ can be used as an SDK for such scenarios.\n\nTherefore you can use s3path for them as well.\n\nAnd even specify per \"Bucket\" what is the source.\n\nThis example shows how to specify default AWS S3 parameters, a `LocalStack`_ Bucket, and a `MinIO`_ Bucket:\n\n.. 
code:: python\n\n   >>> import boto3\n   >>> from botocore.client import Config\n   >>> from s3path import PureS3Path, register_configuration_parameter\n   >>> # Define path's for configuration\n   >>> default_aws_s3_path = PureS3Path('/')\n   >>> local_stack_bucket_path = PureS3Path('/LocalStackBucket/')\n   >>> minio_bucket_path = PureS3Path('/MinIOBucket/')\n   >>> # Define boto3 s3 resources\n   >>> local_stack_resource = boto3.resource('s3', endpoint_url='http://localhost:4566')\n   >>> minio_resource = boto3.resource(\n       's3',\n       endpoint_url='http://localhost:9000',\n       aws_access_key_id='minio',\n       aws_secret_access_key='minio123',\n       config=Config(signature_version='s3v4'),\n       region_name='us-east-1')\n   >>> # Configure and map root path's per boto3 parameters or resources\n   >>> register_configuration_parameter(default_aws_s3_path, parameters={'ServerSideEncryption': 'AES256'})\n   >>> register_configuration_parameter(local_stack_bucket_path, resource=local_stack_resource)\n   >>> register_configuration_parameter(minio_bucket_path, resource=minio_resource)\n\n\ns3path library general options:\n-------------------------------\n\nIn Version 0.4.0 we added a new algorithm for the r/glob methods.\nTo enable the old (pathlib common) Algorithm you can configure it like this:\n\n.. code:: python\n\n   >>> from s3path import PureS3Path, register_configuration_parameter\n   >>> # Define path's for configuration\n   >>> path = PureS3Path('/')\n   >>> register_configuration_parameter(path, glob_new_algorithm=False)\n\n**Note: from version 0.6.0 glob implementation will work only with the new algorithm, there for the glob_new_algorithm arg is in depreciation cycle**\n\n.. _pathlib : https://docs.python.org/3/library/pathlib.html\n.. _boto3 : https://github.com/boto/boto3\n.. _configuration: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html\n.. 
_profiles: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#shared-credentials-file\n.. _setup_default_session: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/boto3.html?highlight=setup_default_session#boto3.setup_default_session\n.. _s3-resource: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#service-resource\n.. _LocalStack: https://github.com/localstack/localstack\n.. _MinIO: https://docs.min.io/\n"
  },
  {
    "path": "docs/comparison.rst",
    "content": "S3Path VS Boto3 S3 SDK\n======================\n\nMost of the boto3 examples are taken from here: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-examples.html\n\nBuckets List:\n-------------\n\nS3Path Example:\n\n.. code:: python\n\n   >>> from s3path import S3Path\n   >>> for bucket in S3Path('/').iterdir():\n   ...     print(bucket)\n\nboto3 Example:\n\n.. code:: python\n\n   >>> import boto3\n   >>> # Create an S3 client\n   >>> s3 = boto3.client('s3')\n   >>> # Call S3 to list current buckets\n   >>> response = s3.list_buckets()\n   >>> # Get a list of all bucket names from the response\n   >>> buckets = [bucket['Name'] for bucket in response['Buckets']]\n   >>> # Print out the bucket list\n   >>> for bucket in buckets:\n   ...     print(bucket)\n\nCreate an Amazon S3 Bucket\n--------------------------\n\nS3Path Example:\n\n.. code:: python\n\n   >>> from s3path import S3Path\n   >>> S3Path('/my-bucket/').mkdir()\n\nboto3 Example:\n\n.. code:: python\n\n   >>> import boto3\n   >>> s3 = boto3.resource('s3')\n   >>> s3.create_bucket(Bucket='my-bucket')\n\nUpload a File to an Amazon S3 Bucket\n------------------------------------\n\nS3Path Example:\n\n.. code:: python\n\n   >>> from pathlib import Path\n   >>> from s3path import S3Path\n   >>> local_path = Path('/tmp/hello.txt')\n   >>> S3Path('/my-bucket/hello.txt').write_text(local_path.read_text())\n\nS3Path Example (buffered, to avoid loading large files into memory):\n\n.. code:: python\n\n   >>> import shutil\n   >>> from pathlib import Path\n   >>> from s3path import S3Path\n   >>> local_path = Path('/tmp/hello.txt')\n   >>> remote_path = S3Path('/my-bucket/hello.txt')\n   >>> with local_path.open('rb') as src, remote_path.open('wb') as dst:\n   >>>     shutil.copyfileobj(src, dst)\n\nboto3 Example:\n\n.. 
code:: python\n\n   >>> import boto3\n   >>> s3 = boto3.resource('s3')\n   >>> bucket = s3.Bucket('my-bucket')\n   >>> bucket.upload_file(Fileobj='/tmp/hello.txt', Key='hello.txt')\n\nDownloading a File\n------------------\n\nS3Path Example:\n\n.. code:: python\n\n   >>> from pathlib import Path\n   >>> from s3path import S3Path\n   >>> local_path = Path('./my_local_image.jpg')\n   >>> local_path.write_text(S3Path('/my-bucket/my_image_in_s3.jpg').read_text())\n\nboto3 Example:\n\n.. code:: python\n\n   >>> import boto3\n   >>> import botocore\n   >>> s3 = boto3.resource('s3')\n   >>>\n   >>> try:\n   >>>     bucket = s3.Bucket('my-bucket')\n   >>>     bucket.download_file(Key='my_image_in_s3.jpg', Filename='my_local_image.jpg')\n   >>> except botocore.exceptions.ClientError as e:\n   >>>     if e.response['Error']['Code'] == \"404\":\n   >>>         print(\"The object does not exist.\")\n   >>>     else:\n   >>>         raise\n\nRetrieving subfolders names in S3 bucket\n----------------------------------------\n\nS3Path Example:\n\n.. code:: python\n\n   >>> from s3path import S3Path\n   >>> for path in S3Path('/my-bucket/prefix-name-with-slash/').iterdir():\n   >>>     if path.is_dir():\n   >>>         print('sub folder : ', path)\n\nboto3 Example:\n\n.. code:: python\n\n   >>> import boto3\n   >>> s3_client = boto3.client('s3')\n   >>> result = client.list_objects(Bucket='my-bucket', Prefix='prefix-name-with-slash/', Delimiter='/')\n   >>> for o in result.get('CommonPrefixes'):\n   >>>     print('sub folder : ', o.get('Prefix'))\n"
  },
  {
    "path": "docs/interface.rst",
    "content": ".. image:: s3path_graph.svg\n\nConcrete paths:\n===============\n\nFull basic Path documentation linked here: `PathDocs`_.\n\n.. _S3Path:\n\nS3Path(\\*pathsegments)\n^^^^^^^^^^^^^^^^^^^^^^\n\nA subclass of `Path`_ and PureS3Path_, this class represents a concrete paths of AWS S3 Service.\nAll actions are use `boto3`_ as the SKD for AWS S3 Service:\n\n.. code:: python\n\n   >>> S3Path('/<bucket>/<key>')\n   S3Path('/<bucket>/<key>')\n\npathsegments are specified similarly to `Path`_.\n\nYou can't use S3Path if you doesn't have boto3 installed in your environment:\n\n.. code:: python\n\n   >>> import boto3\n   Traceback (most recent call last):\n     File \"<stdin>\", line 1, in <module>\n   ModuleNotFoundError: No module named 'boto3'\n   >>> from s3path import S3Path\n   >>> S3Path('/<bucket>/<key>')\n   Traceback (most recent call last):\n   File \"<stdin>\", line 1, in <module>\n   File \"pathlib.py\", line 798, in __new__\n     % (cls.__name__,))\n   NotImplementedError: cannot instantiate 'S3Path' on your system\n\n.. _VersionedS3Path:\n\nVersionedS3Path(\\*pathsegments, version_id)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nA subclass of `S3Path`_ and `PureVersionedS3Path`_, this class represents a concrete path of the AWS\nS3 Service for buckets in which `S3 versioning`_ is enabled. All actions use `boto3`_ as the SKD for\nAWS S3 Service:\n\n.. 
code:: python\n\n   >>> from s3path import VersionedS3Path\n   >>> VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n   VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n\n| pathsegments are specified similarly to `Path`_\n| version_id is a string that can be any valid `AWS S3 version identifier`_\n|\n| New in version 0.5.0\n\nMethods:\n========\n\nS3Path and VersionedS3Path provide the following methods in addition to pure paths methods.\nAll the methods below will raise a `ValueError`_ if the path isn't absolute.\nMany of these methods can raise a `botocore.exceptions.ClientError` if `boto3`_ call fails\n(for example because the path doesn't exist).\n\n**NOTE:** The following signatures are shown for `S3Path`_ but are equally valid for\n`VersionedS3Path`_ as well. Any behavioral differences between `S3Path`_ methods and their\n`VersionedS3Path`_ equivalents are explicitly detailed below (i.e. if a given `VersionedS3Path`_\nmethod signature is not listed below, it is assumed that it behaves identically to its `S3Path`_\nequivalent).\n\n.. _S3Path.stat:\n\nS3Path.stat(*, follow_symlinks=True)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nReturns information about this path (similarly to boto3's `ObjectSummary`_).\nFor compatibility with `pathlib`_, the returned object some similar attributes like `os.stat_result`_.\nThe result is looked up at each call to this method:\n\n.. 
code:: python\n\n   >>> path_stat = S3Path('/pypi-proxy/boto3/index.html').stat()\n   >>> path_stat\n   StatResult(size=188, last_modified=datetime.datetime(2018, 4, 4, 12, 26, 3, tzinfo=tzutc()), version_id=None)\n   >>> path_stat.st_size\n   188\n   >>> path_stat.st_mtime\n   1522833963.0\n   >>> print(path_stat.st_version_id)\n   None\n   >>> path_stat.st_atime\n   Traceback (most recent call last):\n   ...\n   io.UnsupportedOperation: StatResult do not support st_atime attribute\n\n**NOTES:**\n\n* ``follow_symlinks`` option must be always set to ``True``.\n* The returned object will contain an additional ``st_version_id`` attribute that is not part of the\n`os.stat_result`_ API. The value of ``st_version_id`` will be ``None``.\n\nVersionedS3Path.stat(*, follow_symlinks=True)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nBehaves the same as `S3Path.stat`_ with the exception that the ``st_version_id`` attribute of the\nreturned object will contain the version ID of the underlying S3 object.\n\n.. _S3Path.exists:\n\nS3Path.exists()\n^^^^^^^^^^^^^^^\n\nWhether the path points to an existing Bucket, key or key prefix:\n\n.. code:: python\n\n   >>> S3Path('/pypi-proxy/boto3/index.html').exists()\n   True\n   >>> S3Path('/pypi-proxy/boto3/').exists()\n   True\n   >>> S3Path('/fake-bucket/').exists()\n   False\n\nVersionedS3Path.exists()\n^^^^^^^^^^^^^^^^^^^^^^^^\n\nBehaves the same as `S3Path.exists`_ except that the version ID must match in addition to the bucket\nand key.\n\n.. _S3Path.glob:\n\nS3Path.glob(pattern)\n^^^^^^^^^^^^^^^^^^^^\n\nGlob the given relative pattern in the Bucket / key prefix represented by this path,\nyielding all matching files (of any kind):\n\n.. 
code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> [path for path in bucket_path.glob('boto*')]\n   [S3Path('/pypi-proxy/boto3/'), S3Path('/pypi-proxy/botocore/')]\n   >>> [path for path in bucket_path.glob('*/*.html')]\n   [S3Path('/pypi-proxy/requests/index.html'),\n    S3Path('/pypi-proxy/boto3/index.html'),\n    S3Path('/pypi-proxy/botocore/index.html')]]\n\nThe \"**\" pattern means \"this Bucket / key prefix and all sub key prefixes, recursively\".\nIn other words, it enables recursive globbing:\n\n.. code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> list(bucket_path.glob('**/*.html'))\n   [S3Path('/pypi-proxy/requests/index.html'),\n    S3Path('/pypi-proxy/index.html'),\n    S3Path('/pypi-proxy/boto3/index.html'),\n    S3Path('/pypi-proxy/botocore/index.html')]\n\n\nIn version 0.4.0:\nNew Algorithm that better suited to s3 API.\nEspecially for recursive searches.\n\nTo enable the old (pathlib common) Algorithm you can configure it like this:\n\n.. 
code:: python\n\n        register_configuration_parameter(path, glob_new_algorithm=False)\n\nNew version 0.6.0:\nglob implementation will work only with the new algorithm, there for the glob_new_algorithm arg is in depreciation cycle\n\nFor more configuration details please see this `Advanced S3Path configuration`_\n\n**NOTE:** Using the \"**\" pattern in large Buckets may consume an inordinate amount of time in the old algorithm.\n\nS3Path.is_dir()\n^^^^^^^^^^^^^^^\n\nReturns ``True`` if the path points to a Bucket or a key prefix,\n``False`` if it points to a full key path.\n\n``False`` is also returned if the path doesn’t exist.\nOther errors (such as permission errors) are propagated.\n\nS3Path.is_file()\n^^^^^^^^^^^^^^^^\n\nReturns ``True`` if the path points to a Bucket key,\n``False`` if it points to Bucket or a key prefix.\n\n``False`` is also returned if the path doesn’t exist.\nOther errors (such as permission errors) are propagated.\n\nS3Path.is_mount()\n^^^^^^^^^^^^^^^^^\n\nAWS S3 Service doesn't have mounting feature,\nThere for this method will always return ``False``\n\nS3Path.is_symlink()\n^^^^^^^^^^^^^^^^^^^\n\nAWS S3 Service doesn't have symlink feature,\nThere for this method will always return ``False``\n\nS3Path.is_socket()\n^^^^^^^^^^^^^^^^^^\n\nAWS S3 Service doesn't have sockets feature,\nThere for this method will always return ``False``\n\nS3Path.is_fifo()\n^^^^^^^^^^^^^^^^\n\nAWS S3 Service doesn't have fifo feature,\nThere for this method will always return ``False``\n\nS3Path.iterdir()\n^^^^^^^^^^^^^^\n\nWhen the path points to a Bucket or a key prefix,\nyield path objects of the directory contents:\n\n.. 
code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> [path for path in bucket_path.iterdir() if path.is_dir()]\n   [S3Path('/pypi-proxy/requests/'),\n    S3Path('/pypi-proxy/boto3/'),\n    S3Path('/pypi-proxy/botocore/')]\n   >>> boto3_path = bucket_path.joinpath('boto3')\n   >>> [path for path in bucket_path.boto3_path()]\n   [S3Path('/pypi-proxy/boto3/boto3-1.4.1.tar.gz'), S3Path('/pypi-proxy/boto3/index.html')]\n\n.. _S3Path.open:\n\nS3Path.open(mode='r', buffering=-1, encoding=None, errors=None, newline=None)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nOpens the Bucket key pointed to by the path.\nThis delegates to the smart_open library that handles the file streaming.\nreturns a file like object that you can read or write with:\n\n.. code:: python\n\n   >>> with S3Path('/pypi-proxy/botocore/index.html').open() as f:\n   >>>     print(f.read())\n   '<!DOCTYPE html>\n   <html>\n   <head>\n       <meta charset=\"UTF-8\">\n       <title>Package Index</title>\n   </head>\n   <body>\n       <a href=\"botocore-1.4.93.tar.gz\">botocore-1.4.93.tar.gz</a><br>\n   </body>\n   </html>'\n\nVersionedS3Path.open(mode='r', buffering=-1, encoding=None, errors=None, newline=None)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nBehaves the same as `S3Path.open`_ except that ``VersionedS3Path.version_id`` will be used to open\nthe specified version of the object pointed to by the `VersionedS3Path`_ object.\n\nS3Path.owner()\n^^^^^^^^^^^^^^\n\nReturns the name of the user owning the Bucket or key.\nSimilarly to boto3's `ObjectSummary`_ owner attribute\n\nS3Path.read_bytes()\n^^^^^^^^^^^^^^^^^^^\n\nReturn the binary contents of the Bucket key as a bytes object:\n\n.. 
code:: python\n\n   >>> S3Path('/test_bucket/test.txt').write_bytes(b'Binary file contents')\n   >>> S3Path('/test_bucket/test.txt').read_bytes()\n   b'Binary file contents'\n\nS3Path.read_text(encoding=None, errors=None)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nReturns the decoded contents of the Bucket key as a string:\n\n.. code:: python\n\n   >>> S3Path('/test_bucket/test.txt').write_text('Text file contents')\n   >>> S3Path('/test_bucket/test.txt').read_text()\n   'Text file contents'\n\nS3Path.rename(target)\n^^^^^^^^^^^^^^^^^^^^^\n\nRenames this file or Bucket / key prefix / key to the given target.\nIf target exists and is a file, it will be replaced silently if the user has permission.\nIf path is a key prefix, it will replace all the keys with the same prefix to the new target prefix.\ntarget can be either a string or another S3Path_ object:\n\n.. code:: python\n\n   >>> path = S3Path('/test_bucket/test.txt').write_text('Text file contents')\n   >>> target = S3Path('/test_bucket/new_test.txt')\n   >>> path.rename(target)\n   >>> target.read_text()\n   'Text file contents'\n\nS3Path.replace(target)\n^^^^^^^^^^^^^^^^^^^^^^\n\nRenames this Bucket / key prefix / key to the given target.\nIf target points to an existing Bucket / key prefix / key, it will be unconditionally replaced.\n\nS3Path.rglob(pattern)\n^^^^^^^^^^^^^^^^^^^^^\n\nThis is like calling S3Path.glob_ with ``\"**/\"`` added in front of the given relative pattern:\n\n.. 
code:: python\n\n   >>> bucket_path = S3Path('/pypi-proxy/')\n   >>> list(bucket_path.rglob('*.html'))\n   [S3Path('/pypi-proxy/requests/index.html'),\n    S3Path('/pypi-proxy/index.html'),\n    S3Path('/pypi-proxy/botocore/index.html')]\n\nVersion 0.4.0:\nNew Algorithm that better suited to s3 API.\nEspecially for recursive searches.\n\nNew version 0.6.0:\nglob implementation will work only with the new algorithm, there for the glob_new_algorithm arg is in depreciation cycle\n\nS3Path.rmdir()\n^^^^^^^^^^^^^^\n\nRemoves this Bucket / key prefix. The Bucket / key prefix must be empty.\n\nS3Path.unlink(missing_ok=False)\n^^^^^^^^^^^^^^^\n\nRemoves this key from S3. Note that this will not remove directories or buckets, but will\ninstead raise an `IsADirectoryError`_. If the key does is not present in the given bucket,\nor if the bucket is not present, raises a `FileNotFoundError`_.\nIf `missing_ok` is `True` then no exception will be raised.\n\nS3Path.samefile(other_path)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nReturns whether this path points to the same Bucket key as other_path,\nwhich can be either a Path object, or a string:\n\n.. code:: python\n\n   >>> path = S3Path('/test_bucket/test.txt')\n   >>> path.samefile(S3Path('/test_bucket/test.txt'))\n   True\n   >>> path.samefile('/test_bucket/fake')\n   False\n\nS3Path.touch(exist_ok=True, \\**kwargs)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nCreates a key at this given path.\nIf the key already exists, the function succeeds if exist_ok is true\n(and its modification time is updated to the current time), otherwise `FileExistsError`_ is raised.\n\nS3Path.write_bytes(data)\n^^^^^^^^^^^^^^^^^^^^^^^^\n\nOpens the key pointed to in bytes mode, write data to it, and close / save the key:\n\n.. 
code:: python\n\n   >>> S3Path('/test_bucket/test.txt').write_bytes(b'Binary file contents')\n   >>> S3Path('/test_bucket/test.txt').read_bytes()\n   b'Binary file contents'\n\nS3Path.write_text(data, encoding=None, errors=None, newline=None)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nOpens the key pointed to in text mode, writes data to it, and close / save the key:\n\n.. code:: python\n\n   >>> S3Path('/test_bucket/test.txt').write_text('Text file contents')\n   >>> S3Path('/test_bucket/test.txt').read_text()\n   'Text file contents'\n\n**NOTE:** ``newline`` option is only available on Python 3.10 and greater.\n\nS3Path.mkdir(mode=0o777, parents=False, exist_ok=False)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nCreate a path bucket.\n\nAWS S3 Service doesn't support folders, therefore the mkdir method will only create the current bucket.\nIf the bucket path already exists, FileExistsError is raised.\n\nIf exist_ok is false (the default), FileExistsError is raised if the target Bucket already exists.\n\nIf exist_ok is true, OSError exceptions will be ignored.\n\nif parents is false (the default), mkdir will create the bucket only if this is a Bucket path.\n\nif parents is true, mkdir will create the bucket even if the path have a Key path.\n\nmode argument is ignored.\n\nS3Path.get_presigned_url(expire_in: timedelta | int = 3600) -> str\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nReturns a pre-signed url. Anyone with the url can make a GET request to get the file.\nYou can set an expiration date with the expire_in argument (integer or timedelta object).\n\nNote that generating a presigned url may require more information or setup than to use other\nS3Path functions. It's because it needs to know the exact aws region and use s3v4 as signature\nversion. Meaning you may have to do this:\n\n.. 
code:: python\n\n    >>> import boto3\n    >>> from botocore.config import Config\n    >>> from s3path import S3Path, register_configuration_parameter\n\n    >>> resource = boto3.resource(\n    ...     \"s3\",\n    ...     config=Config(signature_version=\"s3v4\"),\n    ...     region_name=\"the aws region name\"\n    ... )\n    >>> register_configuration_parameter(S3Path(\"/\"), resource=resource)\n\nHere is an example of using a presigned url:\n\n.. code:: python\n\n    >>> from s3path import S3Path\n    >>> import requests\n\n    >>> file = S3Path(\"/my-bucket/toto.txt\")\n    >>> file.write_text(\"hello world\")\n\n    >>> presigned_url = file.get_presigned_url()\n    >>> print(requests.get(presigned_url).content)\n    b\"hello world\"\n\nS3Path.walk(top_down=True, on_error=None, follow_symlinks=False)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nWalks the directory tree rooted at this path, yielding a 3-tuple (dirpath, dirnames, filenames).\nThe dirpath is a string, and dirnames and filenames are lists of strings.\n\nNote that this method in AWS S3 Service is very heavy on API calls.\nIt will be better to glob recursively instead of using this method for most cases.\n\nPure paths:\n===========\n\nFull basic PurePath documentation linked here: `PurePathDocs`_.\n\n.. _PureS3Path:\n\nPureS3Path(\\*pathsegments)\n^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nA subclass of `PurePath`_, this path flavour represents AWS S3 Service semantics.\n\n.. code:: python\n\n   >>> PureS3Path('/<bucket>/<key>')\n   PureS3Path('/<bucket>/<key>')\n\npathsegments are specified similarly to `PurePath`_.\n\n.. _PureVersionedS3Path:\n\nPureVersionedS3Path(\\*pathsegments, version_id)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nA subclass of `PureS3Path`_, this path flavour represents AWS S3 Service semantics for buckets in which `S3 versioning`_ is enabled.\n\n.. 
code:: python\n\n   >>> from s3path import PureVersionedS3Path\n   >>> PureVersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n   PureVersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n\n| pathsegments are specified similarly to `PurePath`_.\n| version_id is a string that can be any valid `AWS S3 version identifier`_\n|\n| New in version 0.5.0\n\nPureS3Path has a similar behavior to `PurePosixPath`_, except for the below changes:\n------------------------------------------------------------------------------------\n\nDouble dots (``'..'``) are treated as follows.\nThis is different then PurePath since AWS S3 Service doesn't support symbolic links:\n\n.. code:: python\n\n   >>> PureS3Path('foo/../bar')\n   PureS3Path('bar')\n\n**NOTE:** All The methods below will raise `ValueError`_ if the path isn't absolute.\n\nPureS3Path.joinpath(*other)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nIf the final element of ``other`` is a `PureVersionedS3Path`_ instance, the resulting object will\nalso be a `PureVersionedS3Path`_ instance with ``version_id`` set to ``other[-1].version_id``.\nOtherwise, the resulting object will be a `PureS3Path`_ instance.\n\nPureS3Path.as_uri()\n^^^^^^^^^^^^^^^^^^^\n\nRepresents the path as a AWS S3 URI:\n\n.. code:: python\n\n   >>> p = PureS3Path('/pypi-proxy/boto3/')\n   >>> p.as_uri()\n   's3://pypi-proxy/boto3/'\n   >>> p = PureS3Path('/pypi-proxy/boto3/index.html')\n   >>> p.as_uri()\n   's3://pypi-proxy/boto3/index.html'\n\nPureS3Path.from_uri(uri)\n^^^^^^^^^^^^^^^^^^^^^^^^\n\nRepresents a AWS S3 URI as a PureS3Path:\n\n.. code:: python\n\n   >>> PureS3Path.from_uri('s3://pypi-proxy/boto3/index.html')\n   PureS3Path('/pypi-proxy/boto3/index.html')\n\nThis is a new class method.\n\nPureS3Path.from_bucket_key(bucket, key)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nRepresents a AWS S3 Bucket and Key pairs as a PureS3Path:\n\n.. 
code:: python\n\n   >>> PureS3Path.from_bucket_key('pypi-proxy', 'boto3/index.html')\n   PureS3Path('/pypi-proxy/boto3/index.html')\n\nThis is a new class method.\n\nNew in version 0.3.0.\n\nPureS3Path.bucket\n^^^^^^^^^^^^^^^^^\n\nA string representing the AWS S3 Bucket name, if any:\n\n.. code:: python\n\n   >>> PureS3Path.from_uri('s3://pypi-proxy/boto3/').bucket\n   'pypi-proxy'\n   >>> PureS3Path('/').bucket\n   ''\n\nThis is a new property.\n\nPureS3Path.key\n^^^^^^^^^^^^^^\n\nA string representing the AWS S3 Key name, if any:\n\n.. code:: python\n\n   >>> PureS3Path('/pypi-proxy/boto3/').key\n   'boto3'\n   >>> PureS3Path('/pypi-proxy/boto3/index.html').key\n   'boto3/index.html'\n   >>> PureS3Path.from_uri('s3://pypi-proxy/').key\n   ''\n\nThis is a new property.\n\nPureVersionedS3Path has a similar behavior to `PureS3Path`_, except for the below changes:\n------------------------------------------------------------------------------------------\n\nPureVersionedS3Path.from_uri(uri, *, version_id)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nRepresents a versioned AWS S3 URI as a `PureVersionedS3Path`_:\n\n.. code:: python\n\n   >>> from s3path import PureVersionedS3Path\n   >>> PureVersionedS3Path.from_uri('s3://pypi-proxy/boto3/index.html', version_id='<version_id>')\n   PureVersionedS3Path('/pypi-proxy/boto3/index.html', version_id='<version_id>')\n\nThis is a new class method.\n\nPureVersionedS3Path.from_bucket_key(bucket, key, *, version_id)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nRepresents a versioned AWS S3 Bucket and Key pairs as a `PureVersionedS3Path`_:\n\n.. 
code:: python\n\n   >>> from s3path import PureVersionedS3Path\n   >>> PureVersionedS3Path.from_bucket_key('pypi-proxy', 'boto3/index.html', version_id='<version_id>')\n   PureVersionedS3Path('/pypi-proxy/boto3/index.html', version_id='<version_id>')\n\nThis is a new class method.\n\nDivision Operator with PureVersionedS3Path\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nThe division of `PureVersionedS3Path`_ instances with other objects will yield the following types:\n\n* ``PureVersionedS3Path / PureVersionedS3Path -> PureVersionedS3Path``\n* ``PureS3Path / PureVersionedS3Path -> PureVersionedS3Path``\n* ``str / PureVersionedS3Path -> PureVersionedS3Path``\n* ``PureVersionedS3Path / PureS3Path -> PureS3Path``\n* ``PureVersionedS3Path / str -> PureS3Path``\n\n.. code:: python\n\n   >>> from s3path import S3Path, VersionedS3Path\n   >>> str_path = \"example/path\"\n   >>> s3_path = S3Path(\"example/path\")\n   >>> versioned_s3_path = VersionedS3Path(\"example/path\", version_id=\"<version_id>\")\n   >>> type(versioned_s3_path / versioned_s3_path)\n   <<< s3path.VersionedS3Path\n   >>> type(s3_path / versioned_s3_path)\n   <<< s3path.VersionedS3Path\n   >>> type(str_path / versioned_s3_path)\n   <<< s3path.VersionedS3Path\n   >>> type(versioned_s3_path / s3_path)\n   <<< s3path.S3Path\n   >>> type(versioned_s3_path / str_path)\n   <<< s3path.S3Path\n\nUnsupported Methods:\n====================\n\nThere are several methods that are not supported in S3Path.\nAll of them will raise `NotImplementedError`_.\n\nFor example AWS S3 Service doesn't have a current directory:\n\n.. 
code:: python\n\n   >>> S3Path('/test_bucket/test.txt').cwd()\n   Traceback (most recent call last):\n     File \"<stdin>\", line 1, in <module>\n     File \"/home/lior/lior_env/s3path/s3path.py\", line 235, in cwd\n   raise NotImplementedError(message)\n   NotImplementedError: PathNotSupportedMixin.cwd is unsupported on AWS S3 service\n\nHere is a list of all unsupported methods:\n\n- classmethod S3Path.cwd()\n- classmethod S3Path.home()\n- S3Path.chmod(mode, *, follow_symlinks=True)\n- S3Path.expanduser()\n- S3Path.lchmod(mode)\n- S3Path.group()\n- S3Path.is_block_device()\n- S3Path.is_char_device()\n- S3Path.lstat()\n- S3Path.resolve()\n- S3Path.symlink_to(target, target_is_directory=False)\n\n\n.. _pathlib : https://docs.python.org/3/library/pathlib.html\n.. _os.stat_result : https://docs.python.org/3/library/os.html#os.stat_result\n.. _PurePathDocs : https://docs.python.org/3/library/pathlib.html#pure-paths\n.. _PurePath : https://docs.python.org/3/library/pathlib.html#pathlib.PurePath\n.. _PurePosixPath : https://docs.python.org/3/library/pathlib.html#pathlib.PurePosixPath\n.. _PathDocs : https://docs.python.org/3/library/pathlib.html#concrete-paths\n.. _Path : https://docs.python.org/3/library/pathlib.html#pathlib.Path\n.. _boto3 : https://github.com/boto/boto3\n.. _ValueError : https://docs.python.org/3/library/exceptions.html#ValueError\n.. _FileExistsError : https://docs.python.org/3/library/exceptions.html#FileExistsError\n.. _IsADirectoryError : https://docs.python.org/3/library/exceptions.html#IsADirectoryError\n.. _NotImplementedError : https://docs.python.org/3/library/exceptions.html#NotImplementedError\n.. _ObjectSummary : https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#objectsummary\n.. _Abstract pathlib interface: https://github.com/liormizr/s3path/blob/master/docs/interface.rst\n.. _S3 versioning : https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html\n.. 
_AWS S3 version identifier : https://docs.aws.amazon.com/AmazonS3/latest/userguide/versioning-workflows.html#version-ids\n"
  },
  {
    "path": "s3path/__init__.py",
    "content": "\"\"\"\ns3path provides a Pythonic API to S3 by wrapping boto3 with pathlib interface\n\"\"\"\nimport sys\nfrom pathlib import Path\nfrom . import accessor\n\n__version__ = '0.6.5'\n__all__ = (\n    'Path',\n    'register_configuration_parameter',\n    'configuration_map',\n    'StatResult',\n    'PureS3Path',\n    'S3Path',\n    'VersionedS3Path',\n    'PureVersionedS3Path',\n)\n\nif sys.version_info >= (3, 12):\n    from .accessor import StatResult, configuration_map\n    from .current_version import (\n        S3Path,\n        PureS3Path,\n        VersionedS3Path,\n        PureVersionedS3Path,\n        register_configuration_parameter,\n    )\nelse:\n    from .old_versions import (\n        StatResult,\n        S3Path,\n        PureS3Path,\n        _s3_accessor,\n        VersionedS3Path,\n        PureVersionedS3Path,\n        register_configuration_parameter,\n    )\n    configuration_map = _s3_accessor.configuration_map\n"
  },
  {
    "path": "s3path/accessor.py",
    "content": "import sys\nimport importlib.util\nfrom warnings import warn\nfrom os import stat_result\nfrom threading import Lock\nfrom itertools import chain\nfrom collections import deque\nfrom functools import lru_cache\nfrom contextlib import suppress\nfrom collections import namedtuple\nfrom io import UnsupportedOperation\n\n\ndef _lazy_import_resources(name):\n    if name in sys.modules:\n        return sys.modules[name]\n    spec = importlib.util.find_spec(name)\n    loader = importlib.util.LazyLoader(spec.loader)\n    spec.loader = loader\n    module = importlib.util.module_from_spec(spec)\n    sys.modules[name] = module\n    loader.exec_module(module)\n    return module\n\n\nboto3 = _lazy_import_resources('boto3')\nsmart_open = _lazy_import_resources('smart_open')\n# For Development on Cli, or in general application that require fast startup\n# This will lazy load boto3 resources\n# boto3 increase startup time by X10!\n\n\nclass StatResult(namedtuple('BaseStatResult', 'size, last_modified, version_id', defaults=(None,))):\n    \"\"\"\n    Base of os.stat_result but with boto3 s3 features\n    \"\"\"\n\n    def __getattr__(self, item):\n        if item in vars(stat_result):\n            raise UnsupportedOperation(f'{type(self).__name__} do not support {item} attribute')\n        return super().__getattribute__(item)\n\n    @property\n    def st_size(self) -> int:\n        return self.size\n\n    @property\n    def st_mtime(self) -> float:\n        return self.last_modified.timestamp()\n\n    @property\n    def st_version_id(self) -> str:\n        return self.version_id\n\n\ndef stat(path, *, follow_symlinks=True):\n    if not follow_symlinks:\n        raise NotImplementedError(\n            f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n    resource, config = configuration_map.get_configuration(path)\n    if _is_versioned_path(path):\n        object_summary = _boto3_method_with_parameters(\n            
resource.ObjectVersion(path.bucket, path.key, path.version_id).get,\n            config=config,\n        )\n        return StatResult(\n            size=object_summary.get('ContentLength'),\n            last_modified=object_summary.get('LastModified'),\n            version_id=object_summary.get('VersionId'))\n    object_summary = resource.ObjectSummary(path.bucket, path.key)\n    return StatResult(\n        size=object_summary.size,\n        last_modified=object_summary.last_modified,\n        version_id=None)\n\n\ndef owner(path):\n    bucket_name = path.bucket\n    key_name = path.key\n    resource, config = configuration_map.get_configuration(path)\n    object_summary = resource.ObjectSummary(bucket_name, key_name)\n    # return object_summary.owner['DisplayName']\n    # This is a hack till boto3 resolve this issue:\n    # https://github.com/boto/boto3/issues/1950\n    response = _boto3_method_with_parameters(\n        object_summary.meta.client.list_objects_v2,\n        kwargs={\n            'Bucket': object_summary.bucket_name,\n            'Prefix': object_summary.key,\n            'FetchOwner': True,\n        },\n        config=config,\n    )\n    return response['Contents'][0]['Owner']['DisplayName']\n\n\ndef rename(path, target):\n    source_bucket_name = path.bucket\n    source_key_name = path.key\n    target_bucket_name = target.bucket\n    target_key_name = target.key\n\n    resource, config = configuration_map.get_configuration(path)\n    allowed_copy_args = boto3.s3.transfer.TransferManager.ALLOWED_COPY_ARGS\n\n    if not is_dir(path):\n        target_bucket = resource.Bucket(target_bucket_name)\n        object_summary = resource.ObjectSummary(source_bucket_name, source_key_name)\n        old_source = {'Bucket': object_summary.bucket_name, 'Key': object_summary.key}\n        _boto3_method_with_extraargs(\n            target_bucket.copy,\n            config=config,\n            args=(old_source, target_key_name),\n            
allowed_extra_args=allowed_copy_args)\n        _boto3_method_with_parameters(object_summary.delete)\n        return\n\n    bucket = resource.Bucket(source_bucket_name)\n    target_bucket = resource.Bucket(target_bucket_name)\n    for object_summary in bucket.objects.filter(Prefix=source_key_name):\n        old_source = {'Bucket': object_summary.bucket_name, 'Key': object_summary.key}\n        new_key = object_summary.key.replace(source_key_name, target_key_name)\n        _, config = configuration_map.get_configuration(type(path)(target_bucket_name, new_key))\n        _boto3_method_with_extraargs(\n            target_bucket.copy,\n            config=config,\n            args=(old_source, new_key),\n            allowed_extra_args=allowed_copy_args)\n        _boto3_method_with_parameters(object_summary.delete)\n\n\nreplace = rename\n\n\ndef rmdir(path):\n    bucket_name = path.bucket\n    key_name = path.key\n    resource, config = configuration_map.get_configuration(path)\n    bucket = resource.Bucket(bucket_name)\n    for object_summary in bucket.objects.filter(Prefix=key_name):\n        _boto3_method_with_parameters(object_summary.delete, config=config)\n    if path.is_bucket:\n        _boto3_method_with_parameters(bucket.delete, config=config)\n\n\ndef mkdir(path, mode):\n    resource, config = configuration_map.get_configuration(path)\n    _boto3_method_with_parameters(\n        resource.create_bucket,\n        config=config,\n        kwargs={'Bucket': path.bucket},\n    )\n\n\ndef is_dir(path):\n    if str(path) == path.root:\n        return True\n\n    resource, config = configuration_map.get_configuration(path)\n    bucket = resource.Bucket(path.bucket)\n    query = _boto3_method_with_parameters(\n        bucket.objects.filter,\n        kwargs={'Prefix': _generate_prefix(path)},\n        config=config)\n    return any(query)\n\n\ndef exists(path):\n    bucket_name = path.bucket\n    resource, config = configuration_map.get_configuration(path)\n\n    if not 
path.key:\n        # Check whether or not the bucket exists.\n        # See https://stackoverflow.com/questions/26871884\n        try:\n            _boto3_method_with_parameters(\n                resource.meta.client.head_bucket,\n                kwargs={'Bucket': bucket_name},\n                config=config)\n            return True\n        except Exception as client_error:\n            with suppress(AttributeError, KeyError):\n                error_code = client_error.response['Error']['Code']\n                if error_code == '404':\n                    # Not found\n                    return False\n            raise client_error\n\n    bucket = resource.Bucket(bucket_name)\n    key_name = str(path.key)\n\n    def query_method():\n        method = bucket.object_versions.filter if _is_versioned_path(path) else bucket.objects.filter\n        return _boto3_method_with_parameters(\n            method,\n            kwargs={'Prefix': key_name},\n            config=config)\n\n    if _is_versioned_path(path):\n        for object in query_method():\n            if object.version_id != path.version_id:\n                continue\n            if object.key == key_name:\n                return True\n            if object.key.startswith(key_name + path._flavour.sep):\n                return True\n        return False\n\n    for object in query_method():\n        if object.key == key_name:\n            return True\n        if object.key.startswith(key_name + path._flavour.sep):\n            return True\n    return False\n\n\ndef iter_keys(path, *, prefix=None, full_keys=True):\n    resource, config = configuration_map.get_configuration(path)\n    bucket_name = path.bucket\n\n    def get_keys():\n        continuation_token = None\n        while True:\n            if continuation_token:\n                kwargs['ContinuationToken'] = continuation_token\n            response = _boto3_method_with_parameters(\n                resource.meta.client.list_objects_v2,\n                
kwargs=kwargs,\n                config=config,\n            )\n            for file in response.get('Contents', ()):\n                yield file['Key']\n            for folder in response.get('CommonPrefixes', ()):\n                yield folder['Prefix']\n            if not response.get('IsTruncated'):\n                break\n            continuation_token = response.get('NextContinuationToken')\n\n    # get buckets\n    if not bucket_name and not full_keys:\n        query = _boto3_method_with_parameters(\n            resource.buckets.filter,\n            config=config)\n        for bucket in query:\n            yield bucket.name\n        return\n    # get keys in buckets\n    if not bucket_name:\n        query = _boto3_method_with_parameters(\n            resource.buckets.filter,\n            config=config)\n        for bucket in query:\n            kwargs = {'Bucket': bucket.name}\n            yield from get_keys()\n        return\n    # get keys or part of keys in buckets\n    kwargs = {'Bucket': bucket_name}\n    if prefix:\n        kwargs['Prefix'] = prefix\n    if not full_keys:\n        kwargs['Delimiter'] = path._flavour.sep\n    yield from get_keys()\n\n\ndef scandir(path):\n    return _S3Scandir(path=path)\n\n\ndef open(path, *, mode='r', buffering=-1, encoding=None, errors=None, newline=None):\n    resource, config = configuration_map.get_configuration(path)\n\n    dummy_object = resource.Object('bucket', 'key')\n    get_object_kwargs = _update_kwargs_with_config(\n        dummy_object.meta.client.get_object, config=config)\n    create_multipart_upload_kwargs = _update_kwargs_with_config(\n        dummy_object.meta.client.create_multipart_upload, config=config)\n\n\n    transport_params = {'defer_seek': True}\n    if _is_versioned_path(path):\n        transport_params['version_id'] = path.version_id\n\n    transport_params.update(\n        client=resource.meta.client,\n        client_kwargs={\n            'S3.Client.get_object': get_object_kwargs,\n      
      'S3.Client.create_multipart_upload': create_multipart_upload_kwargs,\n        },\n    )\n\n    return smart_open.open(\n        uri=\"s3:/\" + str(path),\n        mode=mode,\n        buffering=buffering,\n        encoding=encoding,\n        errors=errors,\n        newline=newline,\n        compression='disable',\n        transport_params=transport_params)\n\n\ndef get_presigned_url(path, expire_in: int) -> str:\n    resource, config = configuration_map.get_configuration(path)\n    return _boto3_method_with_parameters(\n        resource.meta.client.generate_presigned_url,\n        config=config,\n        kwargs={\n            'ClientMethod': 'get_object',\n            'Params': {'Bucket': path.bucket, 'Key': path.key},\n            'ExpiresIn': expire_in,\n        }\n    )\n\n\ndef _generate_prefix(path):\n    sep = path._flavour.sep\n    if not path.key:\n        return ''\n    key_name = path.key\n    if not key_name.endswith(sep):\n        return key_name + sep\n    return key_name\n\n\ndef unlink(path, *args, **kwargs):\n    bucket_name = path.bucket\n    key_name = path.key\n    resource, config = configuration_map.get_configuration(path)\n    bucket = resource.Bucket(bucket_name)\n    try:\n        _boto3_method_with_parameters(\n            bucket.meta.client.delete_object,\n            config=config,\n            kwargs={\"Bucket\": bucket_name, \"Key\": key_name}\n        )\n    except Exception as error:\n        raise OSError(f'/{bucket_name}/{key_name}') from error\n\n\ndef walk(path, *, topdown=True, onerror=None, followlinks=False):\n    try:\n        if not exists(path):\n            raise FileNotFoundError(f'No such file or directory: {path}')\n    except FileNotFoundError as error:\n        if onerror is not None:\n            onerror(error)\n        return\n\n    stack = deque([path])\n\n    while stack:\n        top = stack.pop()\n        if isinstance(top, tuple):\n            yield top\n            continue\n\n        dirs = []\n        
nondirs = []\n        walk_dirs = []\n\n        cont = False\n        with scandir(top) as scandir_iter:\n            scandir_iter = iter(scandir_iter)\n            while True:\n                try:\n                    entry = next(scandir_iter)\n                    is_dir = entry.is_dir()\n                    if is_dir:\n                        dirs.append(entry.name)\n                    else:\n                        nondirs.append(entry.name)\n\n                    if not topdown and is_dir:\n                        walk_dirs.append(top / entry.name)\n                except StopIteration:\n                    break\n                except Exception as error:\n                    if onerror is not None:\n                        onerror(error)\n                    cont = True\n                    break\n        if cont:\n            continue\n\n        if topdown:\n            # Yield before sub-directory traversal if going top down\n            yield top, dirs, nondirs\n            # Traverse into sub-directories\n            for dirname in reversed(dirs):\n                new_path = top / dirname\n                stack.append(new_path)\n        else:\n            # Yield after sub-directory traversal if going bottom up\n            stack.append((top, dirs, nondirs))\n            # Traverse into sub-directories\n            for new_path in reversed(walk_dirs):\n                stack.append(new_path)\n\n\ndef _is_versioned_path(path):\n    return hasattr(path, 'version_id') and bool(path.version_id)\n\n\ndef _update_kwargs_with_config(boto3_method, config, kwargs=None):\n    kwargs = kwargs or {}\n    if config is not None:\n        kwargs.update({\n            key: value\n            for key, value in config.items()\n            if key in _get_action_arguments(boto3_method)\n        })\n    return kwargs\n\n\ndef _boto3_method_with_parameters(boto3_method, config=None, args=(), kwargs=None):\n    kwargs = _update_kwargs_with_config(boto3_method, config, 
kwargs)\n    return boto3_method(*args, **kwargs)\n\n\ndef _boto3_method_with_extraargs(\n        boto3_method,\n        config=None,\n        args=(),\n        kwargs=None,\n        extra_args=None,\n        allowed_extra_args=()):\n    kwargs = kwargs or {}\n    extra_args = extra_args or {}\n    if config is not None:\n        extra_args.update({\n            key: value\n            for key, value in config.items()\n            if key in allowed_extra_args\n        })\n    kwargs[\"ExtraArgs\"] = extra_args\n    return boto3_method(*args, **kwargs)\n\n\n@lru_cache()\ndef _get_action_arguments(action):\n    docs = action.__doc__\n    with suppress(AttributeError):\n        docs = action.__doc__._generate()\n    return set(\n        line.replace(':param ', '').strip().strip(':')\n        for line in docs.splitlines()\n        if line.startswith(':param ')\n    )\n\n\nclass _S3Scandir:\n    def __init__(self, *, path):\n        self._path = path\n\n    def __enter__(self):\n        return self\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n\n    def __iter__(self):\n        bucket_name = self._path.bucket\n        resource, config = configuration_map.get_configuration(self._path)\n        if not bucket_name:\n            query = _boto3_method_with_parameters(\n                resource.buckets.all,\n                config=config)\n            for bucket in query:\n                yield _S3DirEntry(bucket.name, is_dir=True)\n            return\n        bucket = resource.Bucket(bucket_name)\n        sep = self._path._flavour.sep\n\n        kwargs = {\n            'Bucket': bucket.name,\n            'Prefix': _generate_prefix(self._path),\n            'Delimiter': sep}\n\n        continuation_token = None\n        while True:\n            if continuation_token:\n                kwargs['ContinuationToken'] = continuation_token\n            response = _boto3_method_with_parameters(\n                    bucket.meta.client.list_objects_v2,\n         
           kwargs=kwargs,\n                    config=config)\n\n            for folder in response.get('CommonPrefixes', ()):\n                full_name = folder['Prefix'][:-1] if folder['Prefix'].endswith(sep) else folder['Prefix']\n                name = full_name.split(sep)[-1]\n                yield _S3DirEntry(name, is_dir=True)\n\n            for file in response.get('Contents', ()):\n                if file['Key'] == response['Prefix']:\n                    continue\n                name = file['Key'].split(sep)[-1]\n                yield _S3DirEntry(name=name, is_dir=False, size=file['Size'], last_modified=file['LastModified'])\n\n            if not response.get('IsTruncated'):\n                break\n            continuation_token = response.get('NextContinuationToken')\n\n\nclass _S3DirEntry:\n    def __init__(self, name, is_dir, size=None, last_modified=None):\n        self.name = name\n        self._is_dir = is_dir\n        self._stat = StatResult(size=size, last_modified=last_modified)\n\n    def __repr__(self):\n        return f'{type(self).__name__}(name={self.name}, is_dir={self._is_dir}, stat={self._stat})'\n\n    def inode(self, *args, **kwargs):\n        return None\n\n    def is_dir(self, follow_symlinks=False):\n        if follow_symlinks:\n            raise TypeError('AWS S3 Service does not have symlink feature')\n        return self._is_dir\n\n    def is_file(self):\n        return not self._is_dir\n\n    def is_symlink(self, *args, **kwargs):\n        return False\n\n    def stat(self):\n        return self._stat\n\n\nclass _S3ConfigurationMap:\n    def __init__(self):\n        self.arguments = None\n        self.resources = None\n        self.general_options = None\n        self.setup_lock = Lock()\n        self.is_setup = False\n\n    def __repr__(self):\n        return f'{type(self).__name__}' \\\n               f'(arguments={self.arguments}, resources={self.resources}, is_setup={self.is_setup})'\n\n    @property\n    def 
default_resource(self):\n        return boto3.resource('s3')\n\n    def set_configuration(self, path, *, resource=None, arguments=None):\n        self._delayed_setup()\n        path_name = str(path)\n        if arguments is not None:\n            self.arguments[path_name] = arguments\n        if resource is not None:\n            self.resources[path_name] = resource\n        self.get_configuration.cache_clear()\n\n    @lru_cache()\n    def get_configuration(self, path):\n        self._delayed_setup()\n        resources = arguments = None\n        for path in chain([path], path.parents):\n            path_name = str(path)\n            if resources is None and path_name in self.resources:\n                resources = self.resources[path_name]\n            if arguments is None and path_name in self.arguments:\n                arguments = self.arguments[path_name]\n        return resources, arguments\n\n    @lru_cache()\n    def get_general_options(self, path):\n        self._delayed_setup()\n        for path in chain([path], path.parents):\n            path_name = str(path)\n            if path_name in self.general_options:\n                return self.general_options[path_name]\n        return\n\n    def _delayed_setup(self):\n        \"\"\" Resolves a circular dependency between us and PureS3Path \"\"\"\n        with self.setup_lock:\n            if not self.is_setup:\n                self.arguments = {'/': {}}\n                self.resources = {'/': self.default_resource}\n                self.is_setup = True\n\n\nconfiguration_map = _S3ConfigurationMap()\n"
  },
  {
    "path": "s3path/current_version.py",
    "content": "from __future__ import annotations\n\nimport re\nimport sys\nimport fnmatch\nimport posixpath\nfrom datetime import timedelta\nfrom contextlib import suppress\nfrom urllib.parse import unquote\nfrom pathlib import PurePath, Path\nfrom typing import TYPE_CHECKING, Literal, Self, Generator\nfrom io import DEFAULT_BUFFER_SIZE, TextIOWrapper\n\nfrom botocore.exceptions import ClientError\n\nif TYPE_CHECKING:\n    from os import PathLike\n    import smart_open\n    from boto3.resources.base import ServiceResource\n    KeyFileObjectType = TextIOWrapper | smart_open.s3.Reader | smart_open.s3.MultipartWriter\n\nfrom . import accessor\n\n\ndef register_configuration_parameter(\n        path: PureS3Path,\n        *,\n        parameters: dict | None = None,\n        resource: ServiceResource | None = None):\n    if not isinstance(path, PureS3Path):\n        raise TypeError(f'path argument have to be a {PurePath} type. got {type(path)}')\n    if parameters and not isinstance(parameters, dict):\n        raise TypeError(f'parameters argument have to be a dict type. 
got {type(path)}')\n    if parameters is None and resource is None:\n        raise ValueError('user have to specify parameters or resource arguments')\n    accessor.configuration_map.set_configuration(\n        path,\n        resource=resource,\n        arguments=parameters)\n\n\nclass _S3Parser:\n    def __getattr__(self, name):\n        return getattr(posixpath, name)\n\n\nclass PureS3Path(PurePath):\n    \"\"\"\n    PurePath subclass for AWS S3 service.\n\n    S3 is not a file-system, but we can look at it like a POSIX system.\n    \"\"\"\n\n    parser = _flavour = _S3Parser()  # _flavour is not relevant after Python version 3.13\n\n    __slots__ = ()\n\n    def __init__(self, *args):\n        super().__init__(*args)\n\n        new_parts = list(self.parts)\n        for part in new_parts[1:]:\n            if part == '..':\n                index = new_parts.index(part)\n                new_parts.pop(index - 1)\n                new_parts.remove(part)\n\n        self._raw_paths = new_parts\n        if sys.version_info >= (3, 13):\n            self._drv, self._root, self._tail_cached = self._parse_path(self._raw_path)\n        else:\n            self._load_parts()\n\n    @classmethod\n    def from_uri(cls, uri: str) -> Self:\n        \"\"\"\n        from_uri class method create a class instance from url\n\n        >> from s3path import PureS3Path\n        >> PureS3Path.from_uri('s3://<bucket>/<key>')\n        << PureS3Path('/<bucket>/<key>')\n        \"\"\"\n        if not uri.startswith('s3://'):\n            raise ValueError('Provided uri seems to be no S3 URI!')\n        unquoted_uri = unquote(uri)\n        return cls(unquoted_uri[4:])\n\n    @classmethod\n    def from_bucket_key(cls, bucket: str | PathLike, key: str | PathLike) -> Self:\n        \"\"\"\n        from_bucket_key class method create a class instance from bucket, key pair's\n\n        >> from s3path import PureS3Path\n        >> PureS3Path.from_bucket_key(bucket='<bucket>', key='<key>')\n        << 
PureS3Path('/<bucket>/<key>')\n        \"\"\"\n        bucket = cls(cls.parser.sep, bucket)\n        if len(bucket.parts) != 2:\n            raise ValueError(f'bucket argument contains more then one path element: {bucket}')\n        key = cls(key)\n        if key.is_absolute():\n            key = key.relative_to('/')\n        return bucket / key\n\n    @property\n    def bucket(self) -> str:\n        \"\"\"\n        The AWS S3 Bucket name, or ''\n        \"\"\"\n        self._absolute_path_validation()\n        with suppress(ValueError):\n            _, bucket, *_ = self.parts\n            return bucket\n        return ''\n\n    @property\n    def is_bucket(self) -> bool:\n        \"\"\"\n        Check if Path is a bucket\n        \"\"\"\n        return self.is_absolute() and self == PureS3Path(f\"/{self.bucket}\")\n\n    @property\n    def key(self) -> str:\n        \"\"\"\n        The AWS S3 Key name, or ''\n        \"\"\"\n        self._absolute_path_validation()\n        key = self.parser.sep.join(self.parts[2:])\n        return key\n\n    def as_uri(self) -> str:\n        \"\"\"\n        Return the path as a 's3' URI.\n        \"\"\"\n        uri = super().as_uri()\n        return uri.replace('file:///', 's3://')\n\n    def _absolute_path_validation(self):\n        if not self.is_absolute():\n            raise ValueError('relative path have no bucket, key specification')\n\n\nclass _PathNotSupportedMixin:\n    _NOT_SUPPORTED_MESSAGE = '{method} is unsupported on S3 service'\n\n    @classmethod\n    def cwd(cls):\n        \"\"\"\n        cwd class method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = cls._NOT_SUPPORTED_MESSAGE.format(method=cls.cwd.__qualname__)\n        raise NotImplementedError(message)\n\n    @classmethod\n    def home(cls):\n        \"\"\"\n        home class method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        
\"\"\"\n        message = cls._NOT_SUPPORTED_MESSAGE.format(method=cls.home.__qualname__)\n        raise NotImplementedError(message)\n\n    def chmod(self, mode, *, follow_symlinks=True):\n        \"\"\"\n        chmod method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.chmod.__qualname__)\n        raise NotImplementedError(message)\n\n    def expanduser(self):\n        \"\"\"\n        expanduser method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.expanduser.__qualname__)\n        raise NotImplementedError(message)\n\n    def lchmod(self, mode):\n        \"\"\"\n        lchmod method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.lchmod.__qualname__)\n        raise NotImplementedError(message)\n\n    def group(self):\n        \"\"\"\n        group method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.group.__qualname__)\n        raise NotImplementedError(message)\n\n    def is_block_device(self):\n        \"\"\"\n        is_block_device method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.is_block_device.__qualname__)\n        raise NotImplementedError(message)\n\n    def is_char_device(self):\n        \"\"\"\n        is_char_device method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.is_char_device.__qualname__)\n        raise 
NotImplementedError(message)\n\n    def lstat(self):\n        \"\"\"\n        lstat method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.lstat.__qualname__)\n        raise NotImplementedError(message)\n\n    def resolve(self):\n        \"\"\"\n        resolve method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.resolve.__qualname__)\n        raise NotImplementedError(message)\n\n    def symlink_to(self, *args, **kwargs):\n        \"\"\"\n        symlink_to method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.symlink_to.__qualname__)\n        raise NotImplementedError(message)\n\n    def hardlink_to(self, *args, **kwargs):\n        \"\"\"\n        hardlink_to method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.hardlink_to.__qualname__)\n        raise NotImplementedError(message)\n\n    def readlink(self):\n        \"\"\"\n        readlink method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.readlink.__qualname__)\n        raise NotImplementedError(message)\n\n    def is_symlink(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have symlink feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def is_socket(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have sockets feature, There for this method will always return False\n        \"\"\"\n        return False\n\n   
 def is_fifo(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have fifo feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def is_mount(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have mounting feature, There for this method will always return False\n        \"\"\"\n        return False\n\n\n\nclass S3Path(_PathNotSupportedMixin, PureS3Path, Path):\n    def stat(self, *, follow_symlinks: bool = True) -> accessor.StatResult | None:\n        \"\"\"\n        Returns information about this path (similarly to boto3's ObjectSummary).\n        For compatibility with pathlib, the returned object some similar attributes like os.stat_result.\n        The result is looked up at each call to this method\n        \"\"\"\n        if not follow_symlinks:\n            raise NotImplementedError(\n                f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n\n        self._absolute_path_validation()\n        if not self.key:\n            return None\n        return accessor.stat(self, follow_symlinks=follow_symlinks)\n\n    def absolute(self) -> Self:\n        \"\"\"\n        Handle absolute method only if the path is already an absolute one\n        since we have no way to compute an absolute path from a relative one in S3.\n        \"\"\"\n        if self.is_absolute():\n            return self\n        # We can't compute the absolute path from a relative one\n        raise ValueError(\"Absolute path can't be determined for relative S3Path objects\")\n\n    def owner(self, *, follow_symlinks: bool = False) -> str:\n        \"\"\"\n        Returns the name of the user owning the Bucket or key.\n        Similarly to boto3's ObjectSummary owner attribute\n        \"\"\"\n        self._absolute_path_validation()\n        if follow_symlinks:\n            raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 
service.')\n        if not self.is_file():\n            raise KeyError('file not found')\n        return accessor.owner(self)\n\n    def rename(self, target) -> Self:\n        \"\"\"\n        Renames this file or Bucket / key prefix / key to the given target.\n        If target exists and is a file, it will be replaced silently if the user has permission.\n        If path is a key prefix, it will replace all the keys with the same prefix to the new target prefix.\n        Target can be either a string or another S3Path object.\n        \"\"\"\n        self._absolute_path_validation()\n        if not isinstance(target, type(self)):\n            target = type(self)(target)\n        target._absolute_path_validation()\n        accessor.rename(self, target)\n        return type(self)(target)\n\n    def replace(self, target) -> Self:\n        \"\"\"\n        Renames this Bucket / key prefix / key to the given target.\n        If target points to an existing Bucket / key prefix / key, it will be unconditionally replaced.\n        \"\"\"\n        return self.rename(target)\n\n    def rmdir(self):\n        \"\"\"\n        Removes this Bucket / key prefix. 
The Bucket / key prefix must be empty\n        \"\"\"\n        self._absolute_path_validation()\n        if self.is_file():\n            raise NotADirectoryError()\n        if not self.is_dir():\n            raise FileNotFoundError()\n        accessor.rmdir(self)\n\n    def samefile(self, other_path: str | PathLike) -> bool:\n        \"\"\"\n        Returns whether this path points to the same Bucket key as other_path,\n        Which can be either a Path object, or a string\n        \"\"\"\n        self._absolute_path_validation()\n        if not isinstance(other_path, S3Path):\n            other_path = type(self)(other_path)\n        return self.bucket == other_path.bucket and self.key == other_path.key and self.is_file()\n\n    def touch(self, mode: int = 0o666, exist_ok: bool = True):\n        \"\"\"\n        Creates a key at this given path.\n        If the key already exists,\n        the function succeeds if exist_ok is true (and its modification time is updated to the current time),\n        otherwise FileExistsError is raised\n        \"\"\"\n        if self.exists() and not exist_ok:\n            raise FileExistsError()\n        self.write_text('')\n\n    def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False):\n        \"\"\"\n        Create a path bucket.\n        AWS S3 Service doesn't support folders, therefore the mkdir method will only create the current bucket.\n        If the bucket path already exists, FileExistsError is raised.\n\n        If exist_ok is false (the default), FileExistsError is raised if the target Bucket already exists.\n        If exist_ok is true, OSError exceptions will be ignored.\n\n        if parents is false (the default), mkdir will create the bucket only if this is a Bucket path.\n        if parents is true, mkdir will create the bucket even if the path have a Key path.\n\n        mode argument is ignored.\n        \"\"\"\n        try:\n            if not self.bucket:\n                raise 
FileNotFoundError(f'No bucket in {type(self)} {self}')\n            if self.key and not parents:\n                raise FileNotFoundError(f'Only bucket path can be created, got {self}')\n            if type(self)(self.parser.sep, self.bucket).exists():\n                raise FileExistsError(f'Bucket {self.bucket} already exists')\n            accessor.mkdir(self, mode)\n        except OSError:\n            if not exist_ok:\n                raise\n\n    def is_dir(self, *, follow_symlinks: bool = False) -> bool:\n        \"\"\"\n        Returns True if the path points to a Bucket or a key prefix, False if it points to a full key path.\n        False is also returned if the path doesn’t exist.\n        Other errors (such as permission errors) are propagated.\n        \"\"\"\n        self._absolute_path_validation()\n        if follow_symlinks:\n            raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n        if self.bucket and not self.key:\n            return True\n        return accessor.is_dir(self)\n\n    def is_file(self, *, follow_symlinks: bool = False) -> bool:\n        \"\"\"\n        Returns True if the path points to a Bucket key, False if it points to Bucket or a key prefix.\n        False is also returned if the path doesn’t exist.\n        Other errors (such as permission errors) are propagated.\n        \"\"\"\n        self._absolute_path_validation()\n        if follow_symlinks:\n            raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n        if not self.bucket or not self.key:\n            return False\n        try:\n            return bool(self.stat())\n        except ClientError:\n            return False\n\n    def exists(self, *, follow_symlinks: bool = False) -> bool:\n        \"\"\"\n        Whether the path points to an existing Bucket, key or key prefix.\n        \"\"\"\n        self._absolute_path_validation()\n        
if follow_symlinks:\n            raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n        if not self.bucket:\n            return True\n        return accessor.exists(self)\n\n    def iterdir(self) -> Generator[Self]:\n        \"\"\"\n        When the path points to a Bucket or a key prefix, yield path objects of the directory contents\n        \"\"\"\n        self._absolute_path_validation()\n        with accessor.scandir(self) as scandir_iter:\n            for entry in scandir_iter:\n                path = self / entry.name\n                yield path\n\n    def open(\n            self,\n            mode: Literal['r', 'w', 'rb', 'wb'] = 'r',\n            buffering: int = DEFAULT_BUFFER_SIZE,\n            encoding: str | None = None,\n            errors: str | None = None,\n            newline: str | None = None) -> KeyFileObjectType:\n        \"\"\"\n        Opens the Bucket key pointed to by the path, returns a Key file object that you can read/write with\n        \"\"\"\n        self._absolute_path_validation()\n        if 'r' in mode and not self.exists():\n            raise FileNotFoundError(f'No such file or directory: {self}')\n        return accessor.open(\n            self,\n            mode=mode,\n            buffering=buffering,\n            encoding=encoding,\n            errors=errors,\n            newline=newline)\n\n    def glob(\n            self,\n            pattern: str, *,\n            case_sensitive: bool | None = None,\n            recurse_symlinks: bool = False) -> Generator[Self]:\n        \"\"\"\n        Glob the given relative pattern in the Bucket / key prefix represented by this path,\n        yielding all matching files (of any kind)\n\n        The glob method is using a new Algorithm that better fit S3 API\n        \"\"\"\n        self._absolute_path_validation()\n        if case_sensitive is False or recurse_symlinks is True:\n            raise ValueError('Glob is 
case-sensitive and no symbolic links are allowed')\n\n        sys.audit(\"pathlib.Path.glob\", self, pattern)\n        if not pattern:\n            raise ValueError(f'Unacceptable pattern: {pattern}')\n        drv, root, pattern_parts = self._parse_path(pattern)\n        if drv or root:\n            raise NotImplementedError(\"Non-relative patterns are unsupported\")\n        for part in pattern_parts:\n            if part != '**' and '**' in part:\n                raise ValueError(\"Invalid pattern: '**' can only be an entire path component\")\n        selector = _Selector(self, pattern=pattern)\n        yield from selector.select()\n\n    def rglob(\n            self,\n            pattern: str, *,\n            case_sensitive: bool | None = None,\n            recurse_symlinks: bool = False) -> Generator[Self]:\n        \"\"\"\n        This is like calling S3Path.glob with \"**/\" added in front of the given relative pattern\n\n        The rglob method is using a new Algorithm that better fit S3 API\n        \"\"\"\n        self._absolute_path_validation()\n\n        sys.audit(\"pathlib.Path.rglob\", self, pattern)\n        if not pattern:\n            raise ValueError(f'Unacceptable pattern: {pattern}')\n        drv, root, pattern_parts = self._parse_path(pattern)\n        if drv or root:\n            raise NotImplementedError(\"Non-relative patterns are unsupported\")\n        for part in pattern_parts:\n            if part != '**' and '**' in part:\n                raise ValueError(\"Invalid pattern: '**' can only be an entire path component\")\n        pattern = f'**{self.parser.sep}{pattern}'\n        selector = _Selector(self, pattern=pattern)\n        yield from selector.select()\n\n    def get_presigned_url(self, expire_in: timedelta | int = 3600) -> str:\n        \"\"\"\n        Returns a pre-signed url. 
Anyone with the url can make a GET request to get the file.\n        You can set an expiration date with the expire_in argument (integer or timedelta object).\n\n        Note that generating a presigned url may require more information or setup than to use other\n        S3Path functions. It's because it needs to know the exact aws region and use s3v4 as signature\n        version. Meaning you may have to do this:\n\n        ```python\n        import boto3\n        from botocore.config import Config\n        from s3path import S3Path, register_configuration_parameter\n\n        resource = boto3.resource(\n            \"s3\",\n            config=Config(signature_version=\"s3v4\"),\n            region_name=\"the aws region name\"\n        )\n        register_configuration_parameter(S3Path(\"/\"), resource=resource)\n        ```\n\n        A simple example:\n        ```python\n        from s3path import S3Path\n        import requests\n\n        file = S3Path(\"/my-bucket/toto.txt\")\n        file.write_text(\"hello world\")\n\n        presigned_url = file.get_presigned_url()\n        print(requests.get(presigned_url).content)\n        b\"hello world\"\n        \"\"\"\n        self._absolute_path_validation()\n        if isinstance(expire_in, timedelta):\n            expire_in = int(expire_in.total_seconds())\n        if expire_in <= 0:\n            raise ValueError(\n                f\"The expire_in argument can't represent a negative or null time delta. 
\"\n                f'You provided expire_in = {expire_in} seconds which is below or equal to 0 seconds.')\n        return accessor.get_presigned_url(self, expire_in)\n\n    def unlink(self, missing_ok: bool = False):\n        \"\"\"\n        Remove this key from its bucket.\n        \"\"\"\n        self._absolute_path_validation()\n        # S3 doesn't care if you remove full prefixes or buckets with its delete API\n        # so unless we manually check, this call will be dropped through without any\n        # validation and could result in data loss\n        try:\n            if self.is_dir():\n                raise IsADirectoryError(str(self))\n            if not self.is_file():\n                raise FileNotFoundError(str(self))\n        except (IsADirectoryError, FileNotFoundError):\n            if missing_ok:\n                return\n            raise\n        try:\n            # XXX: Note: If we don't check if the file exists here, S3 will always return\n            # success even if we try to delete a key that doesn't exist. 
So, if we want\n            # to raise a `FileNotFoundError`, we need to manually check if the file exists\n            # before we make the API call -- since we want to delete the file anyway,\n            # we can just ignore this for now and be satisfied that the file will be removed\n            accessor.unlink(self)\n        except FileNotFoundError:\n            if not missing_ok:\n                raise\n\n    def walk(\n            self,\n            top_down: bool = True,\n            on_error:bool = None,\n            follow_symlinks: bool = False) -> Generator[tuple[Self, list[str], list[str]]]:\n        if follow_symlinks:\n            raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n\n        sys.audit(\"pathlib.Path.walk\", self, on_error, follow_symlinks)\n        yield from accessor.walk(self, topdown=top_down, onerror=on_error)\n\n\nclass PureVersionedS3Path(PureS3Path):\n    \"\"\"\n    PurePath subclass for AWS S3 service Keys with Versions.\n\n    S3 is not a file-system, but we can look at it like a POSIX system.\n    \"\"\"\n\n    def __new__(cls, *args, version_id: str):\n        self = super().__new__(cls, *args)\n        self.version_id = version_id\n        return self\n\n    def __repr__(self) -> str:\n        return f'{type(self).__name__}({self.as_posix()}, version_id={self.version_id})'\n\n    def __truediv__(self, key):\n        if not isinstance(key, (PureS3Path, str)):\n            return NotImplemented\n\n        key = S3Path(key) if isinstance(key, str) else key\n        return key.__rtruediv__(self)\n\n    def __rtruediv__(self, key):\n        if not isinstance(key, (PureS3Path, str)):\n            return NotImplemented\n\n        new_path = super().__rtruediv__(key)\n        new_path.version_id = self.version_id\n        return new_path\n\n    @classmethod\n    def from_uri(cls, uri: str, *, version_id: str) -> Self:\n        \"\"\"\n        from_uri class method creates a 
class instance from uri and version id\n\n        >> from s3path import VersionedS3Path\n        >> VersionedS3Path.from_uri('s3://<bucket>/<key>', version_id='<version_id>')\n        << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n        \"\"\"\n\n        self = PureS3Path.from_uri(uri)\n        return cls(self, version_id=version_id)\n\n    @classmethod\n    def from_bucket_key(cls, bucket: str, key: str, *, version_id: str) -> Self:\n        \"\"\"\n        from_bucket_key class method creates a class instance from bucket, key and version id\n\n        >> from s3path import VersionedS3Path\n        >> VersionedS3Path.from_bucket_key('<bucket>', '<key>', version_id='<version_id>')\n        << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n        \"\"\"\n\n        self = PureS3Path.from_bucket_key(bucket=bucket, key=key)\n        return cls(self, version_id=version_id)\n\n    def with_segments(self, *pathsegments) -> Self:\n        \"\"\"Construct a new path object from any number of path-like objects.\n        Subclasses may override this method to customize how new path objects\n        are created from methods like `iterdir()`.\n        \"\"\"\n        return type(self)(*pathsegments, version_id=self.version_id)\n\n    def joinpath(self, *args):\n        if not args:\n            return self\n\n        new_path = super().joinpath(*args)\n\n        if isinstance(args[-1], PureVersionedS3Path):\n            new_path.version_id = args[-1].version_id\n        else:\n            new_path = S3Path(new_path)\n\n        return new_path\n\n\nclass VersionedS3Path(PureVersionedS3Path, S3Path):\n    \"\"\"\n    S3Path subclass for AWS S3 service Keys with Versions.\n\n    >> from s3path import VersionedS3Path\n    >> VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n    << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n    \"\"\"\n\n    def __init__(self, *args, version_id):\n        
super().__init__(*args)\n\n\ndef _is_wildcard_pattern(pat):\n    # Whether this pattern needs actual matching using fnmatch, or can\n    # be looked up directly as a file.\n    return \"*\" in pat or \"?\" in pat or \"[\" in pat\n\n\nclass _Selector:\n    def __init__(self, path, *, pattern):\n        self._path = path\n        self._prefix, pattern = self._prefix_splitter(pattern)\n        self._full_keys = self._calculate_full_or_just_folder(pattern)\n        self._target_level = self._calculate_pattern_level(pattern)\n        self.match = self._compile_pattern_parts(self._prefix, pattern, path.bucket)\n\n    def select(self):\n        for target in self._deep_cached_dir_scan():\n            target = f'{self._path.parser.sep}{self._path.bucket}{target}'\n            if self.match(target):\n                yield type(self._path)(target)\n\n    def _prefix_splitter(self, pattern):\n        if not _is_wildcard_pattern(pattern):\n            if self._path.key:\n                return f'{self._path.key}{self._path.parser.sep}{pattern}', ''\n            return pattern, ''\n\n        *_, pattern_parts = self._path._parse_path(pattern)\n        prefix = ''\n        for index, part in enumerate(pattern_parts):\n            if _is_wildcard_pattern(part):\n                break\n            prefix += f'{part}{self._path.parser.sep}'\n\n        if pattern.startswith(prefix):\n            pattern = pattern.replace(prefix, '', 1)\n\n        key_prefix = self._path.key\n        if key_prefix:\n            prefix = self._path.parser.sep.join((key_prefix, prefix))\n        return prefix, pattern\n\n    def _calculate_pattern_level(self, pattern):\n        if '**' in pattern:\n            return None\n        if self._prefix:\n            pattern = f'{self._prefix}{self._path.parser.sep}{pattern}'\n        *_, pattern_parts = self._path._parse_path(pattern)\n        return len(pattern_parts)\n\n    def _calculate_full_or_just_folder(self, pattern):\n        if '**' in pattern:\n   
         return True\n        *_, pattern_parts = self._path._parse_path(pattern)\n        for part in pattern_parts[:-1]:\n            if '*' in part:\n                return True\n        return False\n\n    def _deep_cached_dir_scan(self):\n        cache = set()\n        prefix_sep_count = self._prefix.count(self._path.parser.sep)\n        for key in accessor.iter_keys(self._path, prefix=self._prefix, full_keys=self._full_keys):\n            key_sep_count = key.count(self._path.parser.sep) + 1\n            key_parts = key.rsplit(self._path.parser.sep, maxsplit=key_sep_count - prefix_sep_count)\n            target_path_parts = key_parts[:self._target_level]\n            target_path = ''\n            for part in target_path_parts:\n                if not part:\n                    continue\n                target_path += f'{self._path.parser.sep}{part}'\n                if target_path in cache:\n                    continue\n                yield target_path\n                cache.add(target_path)\n\n    def _compile_pattern_parts(self, prefix, pattern, bucket):\n        pattern = self._path.parser.sep.join((\n            '',\n            bucket,\n            prefix,\n            pattern,\n        ))\n        *_, pattern_parts = self._path._parse_path(pattern)\n\n        new_regex_pattern = ''\n        for part in pattern_parts:\n            if part == self._path.parser.sep:\n                continue\n            if '**' in part:\n                new_regex_pattern += f'{self._path.parser.sep}*(?s:{part.replace(\"**\", \".*\")})'\n                continue\n            if '*' == part:\n                new_regex_pattern += f'{self._path.parser.sep}(?s:[^/]+)'\n                continue\n            new_regex_pattern += f'{self._path.parser.sep}{fnmatch.translate(part)[:-2]}'\n        new_regex_pattern += r'/*\\Z'\n        return re.compile(new_regex_pattern).fullmatch\n"
  },
  {
    "path": "s3path/old_versions.py",
    "content": "\"\"\"\ns3path provides a Pythonic API to S3 by wrapping boto3 with pathlib interface\n\"\"\"\nfrom __future__ import annotations\n\nimport re\nimport sys\nimport fnmatch\nfrom os import stat_result\nfrom threading import Lock\nfrom itertools import chain\nfrom datetime import timedelta\nfrom functools import lru_cache\nfrom contextlib import suppress\nfrom urllib.parse import unquote\nfrom collections import namedtuple, deque\nfrom typing import Union, Generator, Literal, Optional\nfrom io import DEFAULT_BUFFER_SIZE, UnsupportedOperation, TextIOWrapper\n\nfrom pathlib import _PosixFlavour, _is_wildcard_pattern, PurePath, Path\n\nimport boto3\nfrom boto3.s3.transfer import TransferManager\nfrom boto3.resources.factory import ServiceResource\nfrom botocore.exceptions import ClientError\nfrom botocore.docs.docstring import LazyLoadedDocstring\nimport smart_open\nimport smart_open.s3\n\n\n__all__ = (\n    'register_configuration_parameter',\n    'S3Path',\n    'VersionedS3Path',\n    'PureS3Path',\n    'PureVersionedS3Path',\n    'StatResult',\n)\n\nALLOWED_COPY_ARGS = TransferManager.ALLOWED_COPY_ARGS\n\n\nclass _S3Flavour(_PosixFlavour):\n    is_supported = bool(boto3)\n\n    def parse_parts(self, parts):\n        drv, root, parsed = super().parse_parts(parts)\n        for part in parsed[1:]:\n            if part == '..':\n                index = parsed.index(part)\n                parsed.pop(index - 1)\n                parsed.remove(part)\n        return drv, root, parsed\n\n    def make_uri(self, path):\n        uri = super().make_uri(path)\n        return uri.replace('file:///', 's3://')\n\n    def compile_pattern_parts(self, path, prefix, pattern, bucket):\n        pattern = self.sep.join((\n            '',\n            bucket,\n            prefix,\n            pattern,\n        ))\n\n        *_, pattern_parts = self.parse_parts((pattern,))\n        new_regex_pattern = ''\n        for part in pattern_parts:\n            if part == self.sep:\n     
           continue\n            if '**' in part:\n                new_regex_pattern += f'{self.sep}*(?s:{part.replace(\"**\", \".*\")})'\n                continue\n            if '*' == part:\n                new_regex_pattern += f'{path._flavour.sep}(?s:[^/]+)'\n                continue\n            new_regex_pattern += f'{self.sep}{fnmatch.translate(part)[:-2]}'\n        new_regex_pattern += r'/*\\Z'\n        return re.compile(new_regex_pattern).fullmatch\n\n\nclass _S3ConfigurationMap:\n    def __init__(self, default_resource_kwargs, **default_arguments):\n        self.default_resource_kwargs = default_resource_kwargs\n        self.default_arguments = default_arguments\n        self.arguments = None\n        self.resources = None\n        self.general_options = None\n        self.setup_lock = Lock()\n        self.is_setup = False\n\n    @property\n    def default_resource(self):\n        return boto3.resource('s3', **self.default_resource_kwargs)\n\n    def _delayed_setup(self):\n        \"\"\" Resolves a circular dependency between us and PureS3Path \"\"\"\n        with self.setup_lock:\n            if not self.is_setup:\n                self.arguments = {PureS3Path('/'): self.default_arguments}\n                self.resources = {PureS3Path('/'): self.default_resource}\n                self.general_options = {PureS3Path('/'): {'glob_new_algorithm': True}}\n                self.is_setup = True\n\n    def __repr__(self):\n        return f'{type(self).__name__}' \\\n               f'(arguments={self.arguments}, resources={self.resources}, is_setup={self.is_setup})'\n\n    def set_configuration(self, path, *, resource=None, arguments=None, glob_new_algorithm=None):\n        self._delayed_setup()\n        if arguments is not None:\n            self.arguments[path] = arguments\n        if resource is not None:\n            self.resources[path] = resource\n        if glob_new_algorithm is not None:\n            self.general_options[path] = {'glob_new_algorithm': 
glob_new_algorithm}\n        self.get_configuration.cache_clear()\n\n    @lru_cache()\n    def get_configuration(self, path):\n        self._delayed_setup()\n        resources = arguments = None\n        for path in chain([path], path.parents):\n            if resources is None and path in self.resources:\n                resources = self.resources[path]\n            if arguments is None and path in self.arguments:\n                arguments = self.arguments[path]\n        return resources, arguments\n\n    @lru_cache()\n    def get_general_options(self, path):\n        self._delayed_setup()\n        for path in chain([path], path.parents):\n            if path in self.general_options:\n                return self.general_options[path]\n        return\n\n\nclass _S3Scandir:\n    def __init__(self, *, s3_accessor, path):\n        self._s3_accessor = s3_accessor\n        self._path = path\n\n    def __enter__(self):\n        return self\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n\n    def __iter__(self) -> Generator[_S3DirEntry, None, None]:\n        bucket_name = self._path.bucket\n        resource, _ = self._s3_accessor.configuration_map.get_configuration(self._path)\n        if not bucket_name:\n            for bucket in resource.buckets.all():\n                yield _S3DirEntry(bucket.name, is_dir=True)\n            return\n        bucket = resource.Bucket(bucket_name)\n        sep = self._path._flavour.sep\n\n        kwargs = {\n            'Bucket': bucket.name,\n            'Prefix': self._s3_accessor.generate_prefix(self._path),\n            'Delimiter': sep}\n\n        continuation_token = None\n        while True:\n            if continuation_token:\n                kwargs['ContinuationToken'] = continuation_token\n            response = bucket.meta.client.list_objects_v2(**kwargs)\n            for folder in response.get('CommonPrefixes', ()):\n                full_name = folder['Prefix'][:-1] if folder['Prefix'].endswith(sep) 
else folder['Prefix']\n                name = full_name.split(sep)[-1]\n                yield _S3DirEntry(name, is_dir=True)\n            for file in response.get('Contents', ()):\n                if file['Key'] == response['Prefix']:\n                    continue\n                name = file['Key'].split(sep)[-1]\n                yield _S3DirEntry(name=name, is_dir=False, size=file['Size'], last_modified=file['LastModified'])\n            if not response.get('IsTruncated'):\n                break\n            continuation_token = response.get('NextContinuationToken')\n\n\nclass _S3Accessor:\n    \"\"\"\n    An accessor implements a particular (system-specific or not)\n    way of accessing paths on the filesystem.\n\n    In this case this will access AWS S3 service\n    \"\"\"\n\n    def __init__(self, **kwargs):\n        self.configuration_map = _S3ConfigurationMap(default_resource_kwargs=kwargs)\n\n    def stat(self, path, *, follow_symlinks=True):\n        if not follow_symlinks:\n            raise NotImplementedError(\n                f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n        resource, _ = self.configuration_map.get_configuration(path)\n        object_summary = resource.ObjectSummary(path.bucket, path.key)\n        return StatResult(\n            size=object_summary.size,\n            last_modified=object_summary.last_modified,\n        )\n\n    def is_dir(self, path):\n        if str(path) == path.root:\n            return True\n        resource, _ = self.configuration_map.get_configuration(path)\n        bucket = resource.Bucket(path.bucket)\n        return any(bucket.objects.filter(Prefix=self.generate_prefix(path)))\n\n    def exists(self, path):\n        bucket_name = path.bucket\n        resource, _ = self.configuration_map.get_configuration(path)\n        if not path.key:\n            # Check whether or not the bucket exists.\n            # See https://stackoverflow.com/questions/26871884\n            try:\n  
              resource.meta.client.head_bucket(Bucket=bucket_name)\n                return True\n            except ClientError as e:\n                error_code = e.response['Error']['Code']\n                if error_code == '404':\n                    # Not found\n                    return False\n                raise e\n        bucket = resource.Bucket(bucket_name)\n        key_name = str(path.key)\n        for object in bucket.objects.filter(Prefix=key_name):\n            if object.key == key_name:\n                return True\n            if object.key.startswith(key_name + path._flavour.sep):\n                return True\n        return False\n\n    def scandir(self, path) -> _S3Scandir:\n        return _S3Scandir(s3_accessor=self, path=path)\n\n    def listdir(self, path):\n        with self.scandir(path) as scandir_iter:\n            return [entry.name for entry in scandir_iter]\n\n    def open(self, path, *, mode='r', buffering=-1, encoding=None, errors=None, newline=None):\n        resource, config = self.configuration_map.get_configuration(path)\n\n        smart_open_kwargs = {\n            'uri': \"s3:/\" + str(path),\n            'mode': mode,\n            'buffering': buffering,\n            'encoding': encoding,\n            'errors': errors,\n            'newline': newline,\n        }\n        transport_params = {'defer_seek': True}\n        dummy_object = resource.Object('bucket', 'key')\n        self._update_smart_open_kwargs(\n            dummy_object,\n            resource,\n            config,\n            transport_params,\n            smart_open_kwargs,\n        )\n\n        file_object = smart_open.open(**smart_open_kwargs)\n        return file_object\n\n    def owner(self, path):\n        bucket_name = path.bucket\n        key_name = path.key\n        resource, _ = self.configuration_map.get_configuration(path)\n        object_summary = resource.ObjectSummary(bucket_name, key_name)\n        # return object_summary.owner['DisplayName']\n    
    # This is a hack till boto3 resolve this issue:\n        # https://github.com/boto/boto3/issues/1950\n        responce = object_summary.meta.client.list_objects_v2(\n            Bucket=object_summary.bucket_name,\n            Prefix=object_summary.key,\n            FetchOwner=True)\n        return responce['Contents'][0]['Owner']['DisplayName']\n\n    def rename(self, path, target):\n        source_bucket_name = path.bucket\n        source_key_name = path.key\n        target_bucket_name = target.bucket\n        target_key_name = target.key\n\n        resource, config = self.configuration_map.get_configuration(path)\n\n        if not self.is_dir(path):\n            target_bucket = resource.Bucket(target_bucket_name)\n            object_summary = resource.ObjectSummary(source_bucket_name, source_key_name)\n            old_source = {'Bucket': object_summary.bucket_name, 'Key': object_summary.key}\n            self._boto3_method_with_extraargs(\n                target_bucket.copy,\n                config=config,\n                args=(old_source, target_key_name),\n                allowed_extra_args=ALLOWED_COPY_ARGS,\n            )\n            self._boto3_method_with_parameters(object_summary.delete)\n            return\n        bucket = resource.Bucket(source_bucket_name)\n        target_bucket = resource.Bucket(target_bucket_name)\n        for object_summary in bucket.objects.filter(Prefix=source_key_name):\n            old_source = {'Bucket': object_summary.bucket_name, 'Key': object_summary.key}\n            new_key = object_summary.key.replace(source_key_name, target_key_name)\n            _, config = self.configuration_map.get_configuration(S3Path(target_bucket_name, new_key))\n            self._boto3_method_with_extraargs(\n                target_bucket.copy,\n                config=config,\n                args=(old_source, new_key),\n                allowed_extra_args=ALLOWED_COPY_ARGS,\n            )\n            
self._boto3_method_with_parameters(object_summary.delete)\n\n    def replace(self, path, target):\n        return self.rename(path, target)\n\n    def rmdir(self, path):\n        bucket_name = path.bucket\n        key_name = path.key\n        resource, config = self.configuration_map.get_configuration(path)\n        bucket = resource.Bucket(bucket_name)\n        for object_summary in bucket.objects.filter(Prefix=key_name):\n            self._boto3_method_with_parameters(object_summary.delete, config=config)\n        if path.is_bucket:\n            self._boto3_method_with_parameters(bucket.delete, config=config)\n\n    def mkdir(self, path, mode):\n        resource, config = self.configuration_map.get_configuration(path)\n        self._boto3_method_with_parameters(\n            resource.create_bucket,\n            config=config,\n            kwargs={'Bucket': path.bucket},\n        )\n\n    def generate_prefix(self, path):\n        sep = path._flavour.sep\n        if not path.key:\n            return ''\n        key_name = path.key\n        if not key_name.endswith(sep):\n            return key_name + sep\n        return key_name\n\n    def unlink(self, path, *args, **kwargs):\n        bucket_name = path.bucket\n        key_name = path.key\n        resource, config = self.configuration_map.get_configuration(path)\n        bucket = resource.Bucket(bucket_name)\n        try:\n            self._boto3_method_with_parameters(\n                bucket.meta.client.delete_object,\n                config=config,\n                kwargs={\"Bucket\": bucket_name, \"Key\": key_name}\n            )\n        except ClientError:\n            raise OSError(f'/{bucket_name}/{key_name}')\n\n    def get_presigned_url(self,path,  expire_in: int) -> str:\n        resource, config = self.configuration_map.get_configuration(path)\n        return self._boto3_method_with_parameters(\n            resource.meta.client.generate_presigned_url,\n            config=config,\n            
kwargs=dict(\n                ClientMethod=\"get_object\",\n                Params={\"Bucket\": path.bucket, \"Key\": path.key},\n                ExpiresIn=expire_in,\n            )\n        )\n\n    def iter_keys(self, path, *, prefix=None, full_keys=True):\n        resource, _ = self.configuration_map.get_configuration(path)\n        bucket_name = path.bucket\n\n        def get_keys():\n            continuation_token = None\n            while True:\n                if continuation_token:\n                    kwargs['ContinuationToken'] = continuation_token\n                response = resource.meta.client.list_objects_v2(**kwargs)\n                for file in response.get('Contents', ()):\n                    yield file['Key']\n                for folder in response.get('CommonPrefixes', ()):\n                    yield folder['Prefix']\n                if not response.get('IsTruncated'):\n                    break\n                continuation_token = response.get('NextContinuationToken')\n\n        # get buckets\n        if not bucket_name and not full_keys:\n            for bucket in resource.buckets.filter():\n                yield bucket.name\n            return\n        # get keys in buckets\n        if not bucket_name:\n            for bucket in resource.buckets.filter():\n                kwargs = {'Bucket': bucket.name}\n                yield from get_keys()\n            return\n        # get keys or part of keys in buckets\n        kwargs = {'Bucket': bucket_name}\n        if prefix:\n            kwargs['Prefix'] = prefix\n        if not full_keys:\n            kwargs['Delimiter'] = path._flavour.sep\n        yield from get_keys()\n\n    def _update_kwargs_with_config(self, boto3_method, config, kwargs=None):\n        kwargs = kwargs or {}\n        if config is not None:\n            kwargs.update({\n                key: value\n                for key, value in config.items()\n                if key in self._get_action_arguments(boto3_method)\n            
})\n        return kwargs\n\n    @lru_cache()\n    def _get_action_arguments(self, action):\n        if isinstance(action.__doc__, LazyLoadedDocstring):\n            docs = action.__doc__._generate()\n        else:\n            docs = action.__doc__\n        return set(\n            line.replace(':param ', '').strip().strip(':')\n            for line in docs.splitlines()\n            if line.startswith(':param ')\n        )\n\n    def _boto3_method_with_parameters(self, boto3_method, config=None, args=(), kwargs=None):\n        kwargs = self._update_kwargs_with_config(boto3_method, config, kwargs)\n        return boto3_method(*args, **kwargs)\n\n    def _boto3_method_with_extraargs(\n            self,\n            boto3_method,\n            config=None,\n            args=(),\n            kwargs=None,\n            extra_args=None,\n            allowed_extra_args=()):\n        kwargs = kwargs or {}\n        extra_args = extra_args or {}\n        if config is not None:\n            extra_args.update({\n                key: value\n                for key, value in config.items()\n                if key in allowed_extra_args\n            })\n        kwargs[\"ExtraArgs\"] = extra_args\n        return boto3_method(*args, **kwargs)\n\n    def _update_smart_open_kwargs(\n            self,\n            dummy_object,\n            resource,\n            config,\n            transport_params,\n            smart_open_kwargs):\n        \"\"\"\n        New Smart-Open (>=5.1.0) api\n        Doc: https://github.com/RaRe-Technologies/smart_open/blob/develop/MIGRATING_FROM_OLDER_VERSIONS.rst\n        \"\"\"\n        get_object_kwargs = self._update_kwargs_with_config(\n            dummy_object.meta.client.get_object, config=config)\n        create_multipart_upload_kwargs = self._update_kwargs_with_config(\n            dummy_object.meta.client.create_multipart_upload, config=config)\n        transport_params.update(\n            client=resource.meta.client,\n            
client_kwargs={\n                'S3.Client.create_multipart_upload': create_multipart_upload_kwargs,\n                'S3.Client.get_object': get_object_kwargs\n            },\n        )\n        smart_open_kwargs.update(\n            compression='disable',\n            transport_params=transport_params,\n        )\n\n\nclass _VersionedS3Accessor(_S3Accessor):\n\n    def stat(self, path, *, follow_symlinks=True):\n        if not follow_symlinks:\n            raise NotImplementedError(\n                f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n        resource, _ = self.configuration_map.get_configuration(path)\n\n        object_summary = resource.ObjectVersion(path.bucket, path.key, path.version_id).get()\n\n        return StatResult(\n            size=object_summary.get('ContentLength'),\n            last_modified=object_summary.get('LastModified'),\n            version_id=object_summary.get('VersionId'),\n        )\n\n    def exists(self, path):\n        resource, _ = self.configuration_map.get_configuration(path)\n        bucket = resource.Bucket(path.bucket)\n        key = path.key\n\n        for obj in bucket.object_versions.filter(Prefix=key):\n            key_match = (obj.key == key) or obj.key.startswith(key + path._flavour.sep)\n            if key_match and (obj.version_id == path.version_id):\n                return True\n\n        return False\n\n    def open(self, path, *, mode='r', buffering=-1, encoding=None, errors=None, newline=None):\n        resource, config = self.configuration_map.get_configuration(path)\n\n        smart_open_kwargs = {\n            'uri': \"s3:/\" + str(path),\n            'mode': mode,\n            'buffering': buffering,\n            'encoding': encoding,\n            'errors': errors,\n            'newline': newline,\n        }\n        transport_params = {'defer_seek': True, \"version_id\": path.version_id}\n        dummy_object = resource.Object('bucket', 'key')\n        
self._update_smart_open_kwargs(\n            dummy_object,\n            resource,\n            config,\n            transport_params,\n            smart_open_kwargs,\n        )\n\n        file_object = smart_open.open(**smart_open_kwargs)\n        return file_object\n\n\nclass _PathNotSupportedMixin:\n    _NOT_SUPPORTED_MESSAGE = '{method} is unsupported on S3 service'\n\n    @classmethod\n    def cwd(cls):\n        \"\"\"\n        cwd class method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = cls._NOT_SUPPORTED_MESSAGE.format(method=cls.cwd.__qualname__)\n        raise NotImplementedError(message)\n\n    @classmethod\n    def home(cls):\n        \"\"\"\n        home class method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = cls._NOT_SUPPORTED_MESSAGE.format(method=cls.home.__qualname__)\n        raise NotImplementedError(message)\n\n    def chmod(self, mode, *, follow_symlinks=True):\n        \"\"\"\n        chmod method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.chmod.__qualname__)\n        raise NotImplementedError(message)\n\n    def expanduser(self):\n        \"\"\"\n        expanduser method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.expanduser.__qualname__)\n        raise NotImplementedError(message)\n\n    def lchmod(self, mode):\n        \"\"\"\n        lchmod method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.lchmod.__qualname__)\n        raise NotImplementedError(message)\n\n    def group(self):\n        \"\"\"\n        group 
method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.group.__qualname__)\n        raise NotImplementedError(message)\n\n    def is_block_device(self):\n        \"\"\"\n        is_block_device method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.is_block_device.__qualname__)\n        raise NotImplementedError(message)\n\n    def is_char_device(self):\n        \"\"\"\n        is_char_device method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.is_char_device.__qualname__)\n        raise NotImplementedError(message)\n\n    def lstat(self):\n        \"\"\"\n        lstat method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.lstat.__qualname__)\n        raise NotImplementedError(message)\n\n    def resolve(self):\n        \"\"\"\n        resolve method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.resolve.__qualname__)\n        raise NotImplementedError(message)\n\n    def symlink_to(self, *args, **kwargs):\n        \"\"\"\n        symlink_to method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.symlink_to.__qualname__)\n        raise NotImplementedError(message)\n\n    def hardlink_to(self, *args, **kwargs):\n        \"\"\"\n        hardlink_to method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n    
    \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.hardlink_to.__qualname__)\n        raise NotImplementedError(message)\n\n    def readlink(self):\n        \"\"\"\n        readlink method is unsupported on S3 service\n        AWS S3 don't have this file system action concept\n        \"\"\"\n        message = self._NOT_SUPPORTED_MESSAGE.format(method=self.readlink.__qualname__)\n        raise NotImplementedError(message)\n\n\nclass _Selector:\n    def __init__(self, path, *, pattern):\n        self._path = path\n        self._prefix, pattern = self._prefix_splitter(pattern)\n        self._full_keys = self._calculate_full_or_just_folder(pattern)\n        self._target_level = self._calculate_pattern_level(pattern)\n        self.match = self._path._flavour.compile_pattern_parts(self._path, self._prefix, pattern, path.bucket)\n\n    def select(self):\n        for target in self._deep_cached_dir_scan():\n            target = f'{self._path._flavour.sep}{self._path.bucket}{target}'\n            if self.match(target):\n                yield type(self._path)(target)\n\n    def _prefix_splitter(self, pattern):\n        if not _is_wildcard_pattern(pattern):\n            if self._path.key:\n                return f'{self._path.key}{self._path._flavour.sep}{pattern}', ''\n            return pattern, ''\n\n        *_, pattern_parts = self._path._flavour.parse_parts((pattern,))\n        prefix = ''\n        for index, part in enumerate(pattern_parts):\n            if _is_wildcard_pattern(part):\n                break\n            prefix += f'{part}{self._path._flavour.sep}'\n\n        if pattern.startswith(prefix):\n            pattern = pattern.replace(prefix, '', 1)\n\n        key_prefix = self._path.key\n        if key_prefix:\n            prefix = self._path._flavour.sep.join((key_prefix, prefix))\n        return prefix, pattern\n\n    def _calculate_pattern_level(self, pattern):\n        if '**' in pattern:\n            return None\n        if 
self._prefix:\n            pattern = f'{self._prefix}{self._path._flavour.sep}{pattern}'\n        *_, pattern_parts = self._path._flavour.parse_parts((pattern,))\n        return len(pattern_parts)\n\n    def _calculate_full_or_just_folder(self, pattern):\n        if '**' in pattern:\n            return True\n        *_, pattern_parts = self._path._flavour.parse_parts((pattern,))\n        for part in pattern_parts[:-1]:\n            if '*' in part:\n                return True\n        return False\n\n    def _deep_cached_dir_scan(self):\n        cache = set()\n        prefix_sep_count = self._prefix.count(self._path._flavour.sep)\n        for key in self._path._accessor.iter_keys(self._path, prefix=self._prefix, full_keys=self._full_keys):\n            key_sep_count = key.count(self._path._flavour.sep) + 1\n            key_parts = key.rsplit(self._path._flavour.sep, maxsplit=key_sep_count - prefix_sep_count)\n            target_path_parts = key_parts[:self._target_level]\n            target_path = ''\n            for part in target_path_parts:\n                if not part:\n                    continue\n                target_path += f'{self._path._flavour.sep}{part}'\n                if target_path in cache:\n                    continue\n                yield target_path\n                cache.add(target_path)\n\n\n_s3_flavour = _S3Flavour()\n_s3_accessor = _S3Accessor()\n_versioned_s3_accessor = _VersionedS3Accessor()\n\n\ndef register_configuration_parameter(\n        path: PureS3Path,\n        *,\n        parameters: Optional[dict] = None,\n        resource: Optional[ServiceResource] = None,\n        glob_new_algorithm: Optional[bool] = None):\n    if not isinstance(path, PureS3Path):\n        raise TypeError(f'path argument have to be a {PurePath} type. got {type(path)}')\n    if parameters and not isinstance(parameters, dict):\n        raise TypeError(f'parameters argument have to be a dict type. 
got {type(parameters)}')\n    if parameters is None and resource is None and glob_new_algorithm is None:\n        raise ValueError('user have to specify parameters or resource arguments')\n    _s3_accessor.configuration_map.set_configuration(\n        path,\n        resource=resource,\n        arguments=parameters,\n        glob_new_algorithm=glob_new_algorithm)\n\n\nclass PureS3Path(PurePath):\n    \"\"\"\n    PurePath subclass for AWS S3 service.\n\n    S3 is not a file-system but we can look at it like a POSIX system.\n    \"\"\"\n    _flavour = _s3_flavour\n    __slots__ = ()\n\n    @classmethod\n    def from_uri(cls, uri: str):\n        \"\"\"\n        from_uri class method create a class instance from url\n\n        >> from s3path import PureS3Path\n        >> PureS3Path.from_uri('s3://<bucket>/<key>')\n        << PureS3Path('/<bucket>/<key>')\n        \"\"\"\n        if not uri.startswith('s3://'):\n            raise ValueError('Provided uri seems to be no S3 URI!')\n        unquoted_uri = unquote(uri)\n        return cls(unquoted_uri[4:])\n\n    @property\n    def bucket(self) -> str:\n        \"\"\"\n        The AWS S3 Bucket name, or ''\n        \"\"\"\n        self._absolute_path_validation()\n        with suppress(ValueError):\n            _, bucket, *_ = self.parts\n            return bucket\n        return ''\n\n    @property\n    def is_bucket(self) -> bool:\n        \"\"\"\n        Check if Path is a bucket\n        \"\"\"\n        return self.is_absolute() and self == PureS3Path(f\"/{self.bucket}\")\n\n    @property\n    def key(self) -> str:\n        \"\"\"\n        The AWS S3 Key name, or ''\n        \"\"\"\n        self._absolute_path_validation()\n        key = self._flavour.sep.join(self.parts[2:])\n        return key\n\n    @classmethod\n    def from_bucket_key(cls, bucket: str, key: str):\n        \"\"\"\n        from_bucket_key class method create a class instance from bucket, key pair's\n\n        >> from s3path import PureS3Path\n        >> 
PureS3Path.from_bucket_key(bucket='<bucket>', key='<key>')\n        << PureS3Path('/<bucket>/<key>')\n        \"\"\"\n        bucket = cls(cls._flavour.sep, bucket)\n        if len(bucket.parts) != 2:\n            raise ValueError(f'bucket argument contains more then one path element: {bucket}')\n        key = cls(key)\n        if key.is_absolute():\n            key = key.relative_to('/')\n        return bucket / key\n\n    def as_uri(self) -> str:\n        \"\"\"\n        Return the path as a 's3' URI.\n        \"\"\"\n        return super().as_uri()\n\n    def _absolute_path_validation(self):\n        if not self.is_absolute():\n            raise ValueError('relative path have no bucket, key specification')\n\n\nclass S3Path(_PathNotSupportedMixin, Path, PureS3Path):\n    \"\"\"\n    Path subclass for AWS S3 service.\n\n    S3Path provide a Python convenient File-System/Path like interface for AWS S3 Service\n     using boto3 S3 resource as a driver.\n\n    If boto3 isn't installed in your environment NotImplementedError will be raised.\n    \"\"\"\n    _accessor = _s3_accessor\n    __slots__ = ()\n\n    def _init(self, template=None):\n        super()._init(template)\n        if template is None:\n            self._accessor = _s3_accessor\n\n    def stat(self, *, follow_symlinks: bool = True) -> StatResult:\n        \"\"\"\n        Returns information about this path (similarly to boto3's ObjectSummary).\n        For compatibility with pathlib, the returned object some similar attributes like os.stat_result.\n        The result is looked up at each call to this method\n        \"\"\"\n        if not follow_symlinks:\n            raise NotImplementedError(\n                f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')\n\n        self._absolute_path_validation()\n        if not self.key:\n            return None\n        return self._accessor.stat(self, follow_symlinks=follow_symlinks)\n\n    def exists(self) -> bool:\n        
\"\"\"\n        Whether the path points to an existing Bucket, key or key prefix.\n        \"\"\"\n        self._absolute_path_validation()\n        if not self.bucket:\n            return True\n        return self._accessor.exists(self)\n\n    def is_dir(self) -> bool:\n        \"\"\"\n        Returns True if the path points to a Bucket or a key prefix, False if it points to a full key path.\n        False is also returned if the path doesn’t exist.\n        Other errors (such as permission errors) are propagated.\n        \"\"\"\n        self._absolute_path_validation()\n        if self.bucket and not self.key:\n            return True\n        return self._accessor.is_dir(self)\n\n    def is_file(self) -> bool:\n        \"\"\"\n        Returns True if the path points to a Bucket key, False if it points to Bucket or a key prefix.\n        False is also returned if the path doesn’t exist.\n        Other errors (such as permission errors) are propagated.\n        \"\"\"\n        self._absolute_path_validation()\n        if not self.bucket or not self.key:\n            return False\n        try:\n            return bool(self.stat())\n        except ClientError:\n            return False\n\n    def iterdir(self) -> Generator[S3Path, None, None]:\n        \"\"\"\n        When the path points to a Bucket or a key prefix, yield path objects of the directory contents\n        \"\"\"\n        self._absolute_path_validation()\n        for name in self._accessor.listdir(self):\n            yield self._make_child_relpath(name)\n\n    def glob(self, pattern: str) -> Generator[S3Path, None, None]:\n        \"\"\"\n        Glob the given relative pattern in the Bucket / key prefix represented by this path,\n        yielding all matching files (of any kind)\n        \"\"\"\n        self._absolute_path_validation()\n        general_options = self._accessor.configuration_map.get_general_options(self)\n        glob_new_algorithm = general_options['glob_new_algorithm']\n        if 
not glob_new_algorithm:\n            yield from super().glob(pattern)\n            return\n        yield from self._glob(pattern)\n\n    def _glob(self, pattern):\n        \"\"\" Glob with new Algorithm that better fit S3 API \"\"\"\n        sys.audit(\"pathlib.Path.glob\", self, pattern)\n        if not pattern:\n            raise ValueError(f'Unacceptable pattern: {pattern}')\n        drv, root, pattern_parts = self._flavour.parse_parts((pattern,))\n        if drv or root:\n            raise NotImplementedError(\"Non-relative patterns are unsupported\")\n        for part in pattern_parts:\n            if part != '**' and '**' in part:\n                raise ValueError(\"Invalid pattern: '**' can only be an entire path component\")\n        selector = _Selector(self, pattern=pattern)\n        yield from selector.select()\n\n    def _scandir(self):\n        \"\"\"\n        Override _scandir so _Selector will rely on an S3 compliant implementation\n        \"\"\"\n        return self._accessor.scandir(self)\n\n    def rglob(self, pattern: str) -> Generator[S3Path, None, None]:\n        \"\"\"\n        This is like calling S3Path.glob with \"**/\" added in front of the given relative pattern\n        \"\"\"\n        self._absolute_path_validation()\n        general_options = self._accessor.configuration_map.get_general_options(self)\n        glob_new_algorithm = general_options['glob_new_algorithm']\n        if not glob_new_algorithm:\n            yield from super().rglob(pattern)\n            return\n        yield from self._rglob(pattern)\n\n    def _rglob(self, pattern):\n        \"\"\" RGlob with new Algorithm that better fit S3 API \"\"\"\n        sys.audit(\"pathlib.Path.rglob\", self, pattern)\n        if not pattern:\n            raise ValueError(f'Unacceptable pattern: {pattern}')\n        drv, root, pattern_parts = self._flavour.parse_parts((pattern,))\n        if drv or root:\n            raise NotImplementedError(\"Non-relative patterns are 
unsupported\")\n        for part in pattern_parts:\n            if part != '**' and '**' in part:\n                raise ValueError(\"Invalid pattern: '**' can only be an entire path component\")\n        pattern = f'**{self._flavour.sep}{pattern}'\n        selector = _Selector(self, pattern=pattern)\n        yield from selector.select()\n\n    def open(\n            self,\n            mode: Literal[\"r\", \"w\", \"rb\", \"wb\"] = 'r',\n            buffering: int = DEFAULT_BUFFER_SIZE,\n            encoding: Optional[str] = None,\n            errors: Optional[str] = None,\n            newline: Optional[str] = None\n    ) -> Union[TextIOWrapper, smart_open.s3.Reader,   smart_open.s3.MultipartWriter]:\n        \"\"\"\n        Opens the Bucket key pointed to by the path, returns a Key file object that you can read/write with\n        \"\"\"\n        self._absolute_path_validation()\n        return self._accessor.open(\n            self,\n            mode=mode,\n            buffering=buffering,\n            encoding=encoding,\n            errors=errors,\n            newline=newline)\n\n    def owner(self) -> str:\n        \"\"\"\n        Returns the name of the user owning the Bucket or key.\n        Similarly to boto3's ObjectSummary owner attribute\n        \"\"\"\n        self._absolute_path_validation()\n        if not self.is_file():\n            raise KeyError('file not found')\n        return self._accessor.owner(self)\n\n    def rename(self, target: Union[str, S3Path]) -> S3Path:\n        \"\"\"\n        Renames this file or Bucket / key prefix / key to the given target.\n        If target exists and is a file, it will be replaced silently if the user has permission.\n        If path is a key prefix, it will replace all the keys with the same prefix to the new target prefix.\n        Target can be either a string or another S3Path object.\n        \"\"\"\n        self._absolute_path_validation()\n        if not isinstance(target, type(self)):\n            
target = type(self)(target)\n        target._absolute_path_validation()\n        self._accessor.rename(self, target)\n        return self.__class__(target)\n\n    def replace(self, target: Union[str, S3Path]) -> S3Path:\n        \"\"\"\n        Renames this Bucket / key prefix / key to the given target.\n        If target points to an existing Bucket / key prefix / key, it will be unconditionally replaced.\n        \"\"\"\n        return self.rename(target)\n\n    def unlink(self, missing_ok: bool = False):\n        \"\"\"\n        Remove this key from its bucket.\n        \"\"\"\n        self._absolute_path_validation()\n        # S3 doesn't care if you remove full prefixes or buckets with its delete API\n        # so unless we manually check, this call will be dropped through without any\n        # validation and could result in data loss\n        try:\n            if self.is_dir():\n                raise IsADirectoryError(str(self))\n            if not self.is_file():\n                raise FileNotFoundError(str(self))\n        except (IsADirectoryError, FileNotFoundError):\n            if missing_ok:\n                return\n            raise\n        try:\n            # XXX: Note: If we don't check if the file exists here, S3 will always return\n            # success even if we try to delete a key that doesn't exist. So, if we want\n            # to raise a `FileNotFoundError`, we need to manually check if the file exists\n            # before we make the API call -- since we want to delete the file anyway,\n            # we can just ignore this for now and be satisfied that the file will be removed\n            self._accessor.unlink(self)\n        except FileNotFoundError:\n            if not missing_ok:\n                raise\n\n    def rmdir(self):\n        \"\"\"\n        Removes this Bucket / key prefix. 
The Bucket / key prefix must be empty\n        \"\"\"\n        self._absolute_path_validation()\n        if self.is_file():\n            raise NotADirectoryError()\n        if not self.is_dir():\n            raise FileNotFoundError()\n        self._accessor.rmdir(self)\n\n    def samefile(self, other_path: Union[str, S3Path]) -> bool:\n        \"\"\"\n        Returns whether this path points to the same Bucket key as other_path,\n        Which can be either a Path object, or a string\n        \"\"\"\n        self._absolute_path_validation()\n        if not isinstance(other_path, Path):\n            other_path = type(self)(other_path)\n        return self.bucket == other_path.bucket and self.key == other_path.key and self.is_file()\n\n    def touch(self, mode: int = 0o666, exist_ok: bool = True):\n        \"\"\"\n        Creates a key at this given path.\n        If the key already exists,\n        the function succeeds if exist_ok is true (and its modification time is updated to the current time),\n        otherwise FileExistsError is raised\n        \"\"\"\n        if self.exists() and not exist_ok:\n            raise FileExistsError()\n        self.write_text('')\n\n    def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False):\n        \"\"\"\n        Create a path bucket.\n        AWS S3 Service doesn't support folders, therefore the mkdir method will only create the current bucket.\n        If the bucket path already exists, FileExistsError is raised.\n\n        If exist_ok is false (the default), FileExistsError is raised if the target Bucket already exists.\n        If exist_ok is true, OSError exceptions will be ignored.\n\n        if parents is false (the default), mkdir will create the bucket only if this is a Bucket path.\n        if parents is true, mkdir will create the bucket even if the path have a Key path.\n\n        mode argument is ignored.\n        \"\"\"\n        try:\n            if not self.bucket:\n                
raise FileNotFoundError(f'No bucket in {type(self)} {self}')\n            if self.key and not parents:\n                raise FileNotFoundError(f'Only bucket path can be created, got {self}')\n            if type(self)(self._flavour.sep, self.bucket).exists():\n                raise FileExistsError(f'Bucket {self.bucket} already exists')\n            self._accessor.mkdir(self, mode)\n        except OSError:\n            if not exist_ok:\n                raise\n\n    def is_mount(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have mounting feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def is_symlink(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have symlink feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def is_socket(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have sockets feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def is_fifo(self) -> Literal[False]:\n        \"\"\"\n        AWS S3 Service doesn't have fifo feature, There for this method will always return False\n        \"\"\"\n        return False\n\n    def absolute(self) -> S3Path:\n        \"\"\"\n        Handle absolute method only if the path is already an absolute one\n        since we have no way to compute an absolute path from a relative one in S3.\n        \"\"\"\n        if self.is_absolute():\n            return self\n        # We can't compute the absolute path from a relative one\n        raise ValueError(\"Absolute path can't be determined for relative S3Path objects\")\n\n    def get_presigned_url(self, expire_in: Union[timedelta, int] = 3600) -> str:\n        \"\"\"\n        Returns a pre-signed url. 
Anyone with the url can make a GET request to get the file.\n        You can set an expiration date with the expire_in argument (integer or timedelta object).\n\n        Note that generating a presigned url may require more information or setup than to use other\n        S3Path functions. It's because it needs to know the exact aws region and use s3v4 as signature\n        version. Meaning you may have to do this:\n\n        ```python\n        import boto3\n        from botocore.config import Config\n        from s3path import S3Path, register_configuration_parameter\n\n        resource = boto3.resource(\n            \"s3\",\n            config=Config(signature_version=\"s3v4\"),\n            region_name=\"the aws region name\"\n        )\n        register_configuration_parameter(S3Path(\"/\"), resource=resource)\n        ```\n\n        A simple example:\n        ```python\n        from s3path import S3Path\n        import requests\n\n        file = S3Path(\"/my-bucket/toto.txt\")\n        file.write_text(\"hello world\")\n\n        presigned_url = file.get_presigned_url()\n        print(requests.get(presigned_url).content)\n        b\"hello world\"\n        \"\"\"\n        self._absolute_path_validation()\n        if isinstance(expire_in, timedelta):\n            expire_in = int(expire_in.total_seconds())\n        if expire_in <= 0:\n            raise ValueError(\n                f\"The expire_in argument can't represent a negative or null time delta. 
\"\n                f\"You provided expire_in = {expire_in} seconds which is below or equal to 0 seconds.\")\n        return self._accessor.get_presigned_url(self, expire_in)\n\n\nclass PureVersionedS3Path(PureS3Path):\n    \"\"\"\n    PurePath subclass for AWS S3 service Keys with Versions.\n\n    S3 is not a file-system, but we can look at it like a POSIX system.\n    \"\"\"\n\n    def __new__(cls, *args, version_id: str):\n\n        self = super().__new__(cls, *args)\n        self.version_id = version_id\n        return self\n\n    @classmethod\n    def from_uri(cls, uri: str, *, version_id: str):\n        \"\"\"\n        from_uri class method creates a class instance from uri and version id\n\n        >> from s3path import VersionedS3Path\n        >> VersionedS3Path.from_uri('s3://<bucket>/<key>', version_id='<version_id>')\n        << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n        \"\"\"\n\n        self = PureS3Path.from_uri(uri)\n        return cls(self, version_id=version_id)\n\n    @classmethod\n    def from_bucket_key(cls, bucket: str, key: str, *, version_id: str):\n        \"\"\"\n        from_bucket_key class method creates a class instance from bucket, key and version id\n\n        >> from s3path import VersionedS3Path\n        >> VersionedS3Path.from_bucket_key('<bucket>', '<key>', version_id='<version_id>')\n        << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n        \"\"\"\n\n        self = PureS3Path.from_bucket_key(bucket=bucket, key=key)\n        return cls(self, version_id=version_id)\n\n    def __repr__(self) -> str:\n        return f'{type(self).__name__}({self.as_posix()}, version_id={self.version_id})'\n\n    def joinpath(self, *args):\n\n        if not args:\n            return self\n\n        new_path = super().joinpath(*args)\n\n        if isinstance(args[-1], PureVersionedS3Path):\n            new_path.version_id = args[-1].version_id\n        else:\n            new_path = 
S3Path(new_path)\n\n        return new_path\n\n    def __truediv__(self, key):\n\n        if not isinstance(key, (PureS3Path, str)):\n            return NotImplemented\n\n        key = S3Path(key) if isinstance(key, str) else key\n        return key.__rtruediv__(self)\n\n    def __rtruediv__(self, key):\n\n        if not isinstance(key, (PureS3Path, str)):\n            return NotImplemented\n\n        new_path = super().__rtruediv__(key)\n        new_path.version_id = self.version_id\n        return new_path\n\n\nclass VersionedS3Path(PureVersionedS3Path, S3Path):\n    \"\"\"\n    S3Path subclass for AWS S3 service Keys with Versions.\n\n    >> from s3path import VersionedS3Path\n    >> VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n    << VersionedS3Path('/<bucket>/<key>', version_id='<version_id>')\n    \"\"\"\n\n    _accessor = _versioned_s3_accessor\n\n    def _init(self, template=None):\n        super()._init(template)\n        if template is None:\n            self._accessor = _versioned_s3_accessor\n\n\nclass StatResult(namedtuple('BaseStatResult', 'size, last_modified, version_id', defaults=(None,))):\n    \"\"\"\n    Base of os.stat_result but with boto3 s3 features\n    \"\"\"\n\n    def __getattr__(self, item):\n        if item in vars(stat_result):\n            raise UnsupportedOperation(f'{type(self).__name__} do not support {item} attribute')\n        return super().__getattribute__(item)\n\n    @property\n    def st_size(self) -> int:\n        return self.size\n\n    @property\n    def st_mtime(self) -> float:\n        return self.last_modified.timestamp()\n\n    @property\n    def st_version_id(self) -> str:\n        return self.version_id\n\n\nclass _S3DirEntry:\n    def __init__(self, name, is_dir, size=None, last_modified=None):\n        self.name = name\n        self._is_dir = is_dir\n        self._stat = StatResult(size=size, last_modified=last_modified)\n\n    def __repr__(self):\n        return 
f'{type(self).__name__}(name={self.name}, is_dir={self._is_dir}, stat={self._stat})'\n\n    def inode(self, *args, **kwargs):\n        return None\n\n    def is_dir(self, follow_symlinks=False):\n        if follow_symlinks:\n            raise TypeError('AWS S3 Service does not have symlink feature')\n        return self._is_dir\n\n    def is_file(self):\n        return not self._is_dir\n\n    def is_symlink(self, *args, **kwargs):\n        return False\n\n    def stat(self):\n        return self._stat\n"
  },
  {
    "path": "s3path/py.typed",
    "content": "# Marker file for PEP 561.  The mypy package uses inline types.\r\n"
  },
  {
    "path": "setup.cfg",
    "content": "[bdist_rpm]\ndoc_files = LICENSE README.rst\n\n[metadata]\nlicense_files = LICENSE\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python\nfrom setuptools import setup\n\nwith open(\"README.rst\", \"r\") as fh:\n    long_description = fh.read()\nsetup(\n    name='s3path',\n    version='0.6.5',\n    url='https://github.com/liormizr/s3path',\n    author='Lior Mizrahi',\n    author_email='li.mizr@gmail.com',\n    packages=['s3path'],\n    package_data={'s3path': [\"py.typed\"]},\n    install_requires=['boto3>=1.16.35','smart-open>=5.1.0',],\n    license='Apache 2.0',\n    long_description=long_description,\n    long_description_content_type='text/x-rst',\n    python_requires='>=3.9',\n    include_package_data=True,\n    classifiers=[\n        'Development Status :: 4 - Beta',\n        'Intended Audience :: Developers',\n        'Natural Language :: English',\n        'License :: OSI Approved :: Apache Software License',\n        'Operating System :: OS Independent',\n        'Programming Language :: Python',\n        'Programming Language :: Python :: 3.9',\n        'Programming Language :: Python :: 3.10',\n        'Programming Language :: Python :: 3.11',\n        'Programming Language :: Python :: 3.12',\n        'Programming Language :: Python :: 3.13',\n    ],\n)\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/conftest.py",
    "content": "import sys\nimport boto3\nimport pytest\nfrom moto import mock_aws\n\nfrom s3path import register_configuration_parameter, PureS3Path\n\n\nif sys.version_info >= (3, 12):\n    from s3path import accessor\n\n    def _cleanup():\n        accessor.configuration_map.get_configuration.cache_clear()\n        accessor.configuration_map.get_general_options.cache_clear()\n        accessor.configuration_map.is_setup = False\nelse:\n    from s3path import S3Path\n\n    def _cleanup():\n        S3Path._accessor.configuration_map.get_configuration.cache_clear()\n        S3Path._accessor.configuration_map.get_general_options.cache_clear()\n        S3Path._accessor.configuration_map.is_setup = False\n\n\n@pytest.fixture()\ndef reset_configuration_cache():\n    try:\n        _cleanup()\n        yield\n    finally:\n        _cleanup()\n\n\n@pytest.fixture()\ndef s3_mock(reset_configuration_cache):\n    with mock_aws():\n        register_configuration_parameter(PureS3Path('/'), resource=boto3.resource('s3'))\n        yield\n"
  },
  {
    "path": "tests/test_not_supported.py",
    "content": "import pytest\nfrom s3path import S3Path\n\n\ndef test_cwd():\n    with pytest.raises(NotImplementedError):\n        S3Path.cwd()\n\n\ndef test_expanduser():\n    with pytest.raises(NotImplementedError):\n        S3Path('/').expanduser()\n\n\ndef test_readlink():\n    with pytest.raises(NotImplementedError):\n        S3Path('/').readlink()\n\n\ndef test_home():\n    with pytest.raises(NotImplementedError):\n        S3Path.home()\n\n\ndef test_chmod():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.chmod(0o666)\n\n\ndef test_lchmod():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.lchmod(0o666)\n\n\ndef test_group():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.group()\n\n\ndef test_is_mount():\n    assert not S3Path('/fake-bucket/fake-key').is_mount()\n\n\ndef test_is_symlink():\n    assert not S3Path('/fake-bucket/fake-key').is_symlink()\n\n\ndef test_is_socket():\n    assert not S3Path('/fake-bucket/fake-key').is_socket()\n\n\ndef test_is_fifo():\n    assert not S3Path('/fake-bucket/fake-key').is_fifo()\n\n\ndef test_is_block_device():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.is_block_device()\n\n\ndef test_is_char_device():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.is_char_device()\n\n\ndef test_lstat():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.lstat()\n\n\ndef test_resolve():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.resolve()\n\n\ndef test_symlink_to():\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.symlink_to('file_name')\n\n\ndef test_stat():\n    path = 
S3Path('/fake-bucket/fake-key')\n    with pytest.raises(NotImplementedError):\n        path.stat(follow_symlinks=False)\n"
  },
  {
    "path": "tests/test_path_operations.py",
    "content": "import shutil\nimport sys\nfrom datetime import timedelta\nfrom pathlib import Path, PosixPath\nfrom io import UnsupportedOperation\nfrom tempfile import NamedTemporaryFile\n\nimport boto3\nimport requests\nfrom botocore.exceptions import ClientError\nimport pytest\n\nfrom s3path import PureS3Path, S3Path, StatResult, VersionedS3Path\n\n# todo: test samefile/touch method\n# todo: test security and boto config changes\n\n\ndef test_path_support():\n    assert PureS3Path in S3Path.mro()\n    assert Path in S3Path.mro()\n\n\ndef test_stat(s3_mock):\n    path = S3Path('fake-bucket/fake-key')\n    with pytest.raises(ValueError):\n        path.stat()\n\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(ClientError):\n        path.stat()\n\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/Test.test')\n    stat = path.stat()\n\n    assert isinstance(stat, StatResult)\n    assert stat == StatResult(\n        size=object_summary.size,\n        last_modified=object_summary.last_modified,\n    )\n\n    with NamedTemporaryFile() as local_file:\n        local_file.write(path.read_bytes())\n        local_file.flush()\n        local_path = Path(local_file.name)\n\n        local_stat = local_path.stat()\n        s3_stat = path.stat()\n\n        assert s3_stat.st_size == local_stat.st_size == s3_stat.size\n        assert s3_stat.last_modified.timestamp() == s3_stat.st_mtime\n        assert s3_stat.st_mtime < local_stat.st_mtime\n\n    with pytest.raises(UnsupportedOperation):\n        path.stat().st_atime\n\n    path = S3Path('/test-bucket')\n    assert path.stat() is None\n\n\ndef test_exists(s3_mock):\n    path = S3Path('./fake-key')\n    with pytest.raises(ValueError):\n        path.exists()\n\n    path = S3Path('/fake-bucket/fake-key')\n    with pytest.raises(ClientError):\n      
  path.exists()\n\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    assert not S3Path('/test-bucket/Test.test').exists()\n    path = S3Path('/test-bucket/directory/Test.test')\n    assert path.exists()\n    for parent in path.parents:\n        assert parent.exists()\n\n    assert S3Path('/').exists()\n\n\ndef test_glob(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    assert list(S3Path('/test-bucket/').glob('*.test')) == []\n    assert list(S3Path('/test-bucket/directory/').glob('*.test')) == [S3Path('/test-bucket/directory/Test.test')]\n    assert list(S3Path('/test-bucket/').glob('**/*.test')) == [S3Path('/test-bucket/directory/Test.test')]\n\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test data')\n\n    assert sorted(S3Path.from_uri('s3://test-bucket/').glob('*.py')) == [\n        S3Path('/test-bucket/pathlib.py'),\n        S3Path('/test-bucket/setup.py'),\n        S3Path('/test-bucket/test_pathlib.py')]\n    assert sorted(S3Path.from_uri('s3://test-bucket/').glob('*/*.py')) == [S3Path('/test-bucket/docs/conf.py')]\n    assert sorted(S3Path.from_uri('s3://test-bucket/').glob('**/*.py')) == [\n        S3Path('/test-bucket/build/lib/pathlib.py'),\n    
    S3Path('/test-bucket/docs/conf.py'),\n        S3Path('/test-bucket/pathlib.py'),\n        S3Path('/test-bucket/setup.py'),\n        S3Path('/test-bucket/test_pathlib.py')]\n    assert sorted(S3Path.from_uri('s3://test-bucket/').glob('*cs')) == [S3Path('/test-bucket/docs/')]\n    assert sorted(S3Path.from_uri('s3://test-bucket/').glob('docs/')) == [S3Path('/test-bucket/docs/')]\n\n\ndef test_glob_nested_folders_issue_no_115(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n    full_folder_tree = ''\n    object_summary = s3.ObjectSummary('my-bucket', 'test.txt')\n    object_summary.put(Body=b'test data')\n    for folder in range(6):\n        object_summary = s3.ObjectSummary('my-bucket', f'{full_folder_tree}test.txt')\n        object_summary.put(Body=b'test data')\n        full_folder_tree += f'{folder}/'\n\n    bucket = S3Path(\"/my-bucket/\")\n    path = bucket\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/test.txt')]\n    path /= S3Path('0/')\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/0/test.txt')]\n    path /= S3Path('1/')\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/0/1/test.txt')]\n    path /= S3Path('2/')\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/0/1/2/test.txt')]\n    path /= S3Path('3/')\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/0/1/2/3/test.txt')]\n    path /= S3Path('4/')\n    assert list(path.glob('*.txt')) == [S3Path('/my-bucket/0/1/2/3/4/test.txt')]\n\n    bucket = S3Path(\"/my-bucket/\")\n    path = bucket\n    for index, folder in enumerate(range(6)):\n        assert sum(1 for _ in path.rglob('*.txt')) == 6 - index\n        path /= S3Path(f'{folder}/')\n\n\ndef test_glob_nested_folders_issue_no_120(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n    object_summary = s3.ObjectSummary('my-bucket', 's3path-test/nested/further/test.txt')\n    object_summary.put(Body=b'test data')\n\n    path = 
S3Path(\"/my-bucket/s3path-test/nested/\")\n    assert list(path.glob(\"further/*\")) == [S3Path('/my-bucket/s3path-test/nested/further/test.txt')]\n\n\ndef test_glob_issue_160(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n    example_paths = [\n        's3path/output',\n        's3path/1/output',\n        's3path/2/output',\n        's3path/3/output',\n    ]\n    for example_path in example_paths:\n        object_summary = s3.ObjectSummary('my-bucket', f'{example_path}/test.txt')\n        object_summary.put(Body=b'test data')\n\n    path = S3Path.from_uri(\"s3://my-bucket/s3path\")\n    assert set(path.glob('**/output/')) == {\n        S3Path('/my-bucket/s3path/output'),\n        S3Path('/my-bucket/s3path/1/output'),\n        S3Path('/my-bucket/s3path/2/output'),\n        S3Path('/my-bucket/s3path/3/output'),\n    }\n    assert sum(1 for _ in path.glob('**/output/')) == 4\n\n    assert set(path.rglob('output/')) == {\n        S3Path('/my-bucket/s3path/output'),\n        S3Path('/my-bucket/s3path/1/output'),\n        S3Path('/my-bucket/s3path/2/output'),\n        S3Path('/my-bucket/s3path/3/output'),\n    }\n    assert sum(1 for _ in path.rglob('output/')) == 4\n\n\ndef test_glob_issue_160_weird_behavior(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n\n    first_dir = S3Path.from_uri(f\"s3://my-bucket/first_dir/\")\n    new_file = first_dir / \"some_dir\" / \"empty.txt\"\n    new_file.touch()\n    assert list(first_dir.glob(\"*\")) == [S3Path('/my-bucket/first_dir/some_dir/')]\n\n    second_dir = S3Path.from_uri(f\"s3://my-bucket/first_dir/second_dir/\")\n    new_file = second_dir / \"some_dir\" / \"empty.txt\"\n    new_file.touch()\n    assert list(second_dir.glob(\"*\")) == [S3Path('/my-bucket/first_dir/second_dir/some_dir/')]\n\n    third_dir = S3Path.from_uri(f\"s3://my-bucket/first_dir/second_dir/third_dir/\")\n    new_file = third_dir / \"some_dir\" / \"empty.txt\"\n    
new_file.touch()\n    assert list(third_dir.glob(\"*\")) == [S3Path('/my-bucket/first_dir/second_dir/third_dir/some_dir/')]\n\n\ndef test_glob_nested_folders_issue_no_179(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n    example_paths = [\n        's3path/nested/further/andfurther/too_far_1.txt',\n        's3path/nested/further/andfurther/too_far_2.txt',\n    ]\n    for example_path in example_paths:\n        object_summary = s3.ObjectSummary('my-bucket', f'{example_path}/test.txt')\n        object_summary.put(Body=b'test data')\n\n    path = S3Path.from_uri(\"s3://my-bucket/s3path/nested\")\n    assert list(path.glob(\"*/*\")) == [\n        S3Path('/my-bucket/s3path/nested/further/andfurther')]\n\n\ndef test_rglob(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    assert list(S3Path('/test-bucket/').rglob('*.test')) == [S3Path('/test-bucket/directory/Test.test')]\n    assert list(S3Path('/test-bucket/').rglob('**/*.test')) == [S3Path('/test-bucket/directory/Test.test')]\n\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test data')\n\n    assert sorted(S3Path.from_uri('s3://test-bucket/').rglob('*.py')) == [\n        S3Path('/test-bucket/build/lib/pathlib.py'),\n        S3Path('/test-bucket/docs/conf.py'),\n        S3Path('/test-bucket/pathlib.py'),\n        
S3Path('/test-bucket/setup.py'),\n        S3Path('/test-bucket/test_pathlib.py')]\n\n\ndef test_accessor_scandir(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test data')\n\n    assert sorted(S3Path.from_uri('s3://test-bucket/').rglob('*.py')) == [\n        S3Path('/test-bucket/build/lib/pathlib.py'),\n        S3Path('/test-bucket/docs/conf.py'),\n        S3Path('/test-bucket/pathlib.py'),\n        S3Path('/test-bucket/setup.py'),\n        S3Path('/test-bucket/test_pathlib.py')]\n\n\ndef test_is_dir(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test 
data')\n\n    assert S3Path('/').is_dir()\n    assert not S3Path('/test-bucket/fake.test').is_dir()\n    assert not S3Path('/test-bucket/fake/').is_dir()\n    assert S3Path('/test-bucket/directory').is_dir()\n    assert not S3Path('/test-bucket/directory/Test.test').is_dir()\n    assert not S3Path('/test-bucket/pathlib.py').is_dir()\n    assert not S3Path('/test-bucket/docs/conf.py').is_dir()\n    assert S3Path('/test-bucket/docs/').is_dir()\n    assert S3Path('/test-bucket/build/').is_dir()\n    assert S3Path('/test-bucket/build/lib').is_dir()\n    assert not S3Path('/test-bucket/build/lib/pathlib.py').is_dir()\n\n\ndef test_is_file(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test data')\n\n    assert not S3Path('/test-bucket/fake.test').is_file()\n    assert not S3Path('/test-bucket/fake/').is_file()\n    assert not S3Path('/test-bucket/directory').is_file()\n    assert S3Path('/test-bucket/directory/Test.test').is_file()\n    assert S3Path('/test-bucket/pathlib.py').is_file()\n    assert S3Path('/test-bucket/docs/conf.py').is_file()\n    assert not S3Path('/test-bucket/docs/').is_file()\n    assert not S3Path('/test-bucket/build/').is_file()\n    assert not S3Path('/test-bucket/build/lib').is_file()\n    assert 
S3Path('/test-bucket/build/lib/pathlib.py').is_file()\n\n\ndef test_read_line(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data\\ntest data')\n\n    with S3Path('/test-bucket/directory/Test.test').open(\"r\") as fp:\n        assert fp.readline() == \"test data\\n\"\n        assert fp.readline() == \"test data\"\n        assert fp.readline() == \"\"\n\n\ndef test_read_lines(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data\\ntest data')\n\n    with S3Path('/test-bucket/directory/Test.test').open(\"r\") as fp:\n        assert len(fp.readlines()) == 2\n\n\ndef test_fix_url_encoding_issue(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'paramA=valueA/paramB=valueB/name')\n    object_summary.put(Body=b'test data\\ntest data')\n\n    assert S3Path('/test-bucket/paramA=valueA/paramB=valueB/name').read_bytes() == b'test data\\ntest data'\n\n\ndef test_read_lines_hint(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data\\ntest data')\n\n    with S3Path('/test-bucket/directory/Test.test').open() as fp:\n        assert len(fp.readlines(1)) == 1\n\n    with S3Path('/test-bucket/directory/Test.test').open('br') as fp:\n        assert len(fp.readlines(1)) == 1  # work only in binary mode\n\n\ndef test_iter_lines(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data\\ntest data\\n')\n\n    with 
S3Path('/test-bucket/directory/Test.test').open(\"r\") as fp:\n        for line in fp:\n            assert line == \"test data\\n\"\n\n\ndef test_write_lines(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    with path.open(\"w\") as fp:\n        fp.writelines([\"line 1\\n\", \"line 2\\n\"])\n\n    res = path.read_text().splitlines()\n    assert len(res) == 2\n\n\ndef test_iterdir(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'setup.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'test_pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'build/lib/pathlib.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/make.bat')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/index.rst')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/Makefile')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_templates/11conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_build/22conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_static/conf.py')\n    object_summary.put(Body=b'test data')\n\n    s3_path = 
S3Path('/test-bucket/docs')\n    assert sorted(s3_path.iterdir()) == sorted([\n        S3Path('/test-bucket/docs/_build'),\n        S3Path('/test-bucket/docs/_static'),\n        S3Path('/test-bucket/docs/_templates'),\n        S3Path('/test-bucket/docs/conf.py'),\n        S3Path('/test-bucket/docs/index.rst'),\n        S3Path('/test-bucket/docs/make.bat'),\n        S3Path('/test-bucket/docs/Makefile'),\n    ])\n\n\ndef test_iterdir_on_buckets(s3_mock):\n    s3 = boto3.resource('s3')\n    for index in range(4):\n        s3.create_bucket(Bucket='test-bucket{}'.format(index))\n\n    s3_root_path = S3Path('/')\n    assert sorted(s3_root_path.iterdir()) == [\n        S3Path('/test-bucket{}'.format(index))\n        for index in range(4)\n    ]\n\n\ndef test_empty_directory(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n\n    assert list(S3Path('/test-bucket').iterdir()) == []\n\n    s3.meta.client.put_object(Bucket='test-bucket', Key='to/empty/dir/')\n    assert list(S3Path('/test-bucket/to/empty/dir/').iterdir()) == []\n\n\ndef test_open_for_reading(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    file_obj = path.open()\n    assert file_obj.read() == 'test data'\n\n\ndef test_presigned_url(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    presigned_url = path.get_presigned_url()\n    assert requests.get(presigned_url).content == b'test data'\n\n\ndef test_presigned_url_expire(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = 
s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    presigned_url = path.get_presigned_url(expire_in=123)\n    assert requests.get(presigned_url).content == b'test data'\n\n\ndef test_presigned_url_expire_with_timedelta(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    presigned_url = path.get_presigned_url(expire_in=timedelta(seconds=123))\n    assert requests.get(presigned_url).content == b'test data'\n\n\ndef test_presigned_url_expire_with_negative_timedelta(s3_mock):\n    path = S3Path('/test-bucket/directory/Test.test')\n    with pytest.raises(ValueError) as err:\n        path.get_presigned_url(expire_in=timedelta(seconds=-123))\n    assert str(err.value) == (\n        \"The expire_in argument can't represent a negative or null time delta. \"\n        \"You provided expire_in = -123 seconds which is below or equal to 0 seconds.\"\n    )\n\n\ndef test_presigned_url_expire_with_negative_seconds(s3_mock):\n    path = S3Path('/test-bucket/directory/Test.test')\n    with pytest.raises(ValueError) as err:\n        path.get_presigned_url(expire_in=-123)\n    assert str(err.value) == (\n        \"The expire_in argument can't represent a negative or null time delta. 
\"\n        \"You provided expire_in = -123 seconds which is below or equal to 0 seconds.\"\n    )\n\n\ndef test_presigned_url_malformed_path(s3_mock):\n    path = S3Path('Test.test')\n    with pytest.raises(ValueError) as err:\n        path.get_presigned_url(expire_in=timedelta(seconds=123))\n    assert str(err.value) == \"relative path have no bucket, key specification\"\n\n\ndef test_open_for_write(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    bucket = s3.Bucket('test-bucket')\n    assert sum(1 for _ in bucket.objects.all()) == 0\n\n    path = S3Path('/test-bucket/directory/Test.test')\n\n    with path.open(mode='bw') as file_obj:\n        assert file_obj.writable()\n        file_obj.write(b'test data\\n')\n        file_obj.writelines([b'test data'])\n    assert sum(1 for _ in bucket.objects.all()) == 1\n\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    streaming_body = object_summary.get()['Body']\n\n    assert list(streaming_body.iter_lines()) == [\n        b'test data',\n        b'test data'\n    ]\n\n\ndef test_open_binary_read(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    with path.open(mode='br') as file_obj:\n        assert file_obj.readlines() == [b'test data']\n\n    with path.open(mode='rb') as file_obj:\n        assert file_obj.readline() == b'test data'\n        assert file_obj.readline() == b''\n        assert file_obj.readline() == b''\n\n\ndef test_read_bytes(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    assert path.read_bytes() == b'test 
data'\n\n\ndef test_open_text_read(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    with path.open(mode='r') as file_obj:\n        assert file_obj.readlines() == ['test data']\n\n    with path.open(mode='rt') as file_obj:\n        assert file_obj.readline() == 'test data'\n        assert file_obj.readline() == ''\n        assert file_obj.readline() == ''\n\n\ndef test_read_text(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    assert path.read_text() == 'test data'\n\n\ndef test_owner(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'directory/Test.test')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/directory/Test.test')\n    assert path.owner() == 'webfile'\n\n\ndef test_rename_s3_to_s3(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/make.bat')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/index.rst')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/Makefile')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_templates/11conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 
'docs/_build/22conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_static/conf.py')\n    object_summary.put(Body=b'test data')\n\n    s3.create_bucket(Bucket='target-bucket')\n\n    S3Path('/test-bucket/docs/conf.py').rename('/test-bucket/docs/conf1.py')\n    assert not S3Path('/test-bucket/docs/conf.py').exists()\n    assert S3Path('/test-bucket/docs/conf1.py').is_file()\n\n    path = S3Path('/test-bucket/docs/')\n    path.rename(S3Path('/target-bucket') / S3Path('folder'))\n    assert not path.exists()\n    assert S3Path('/target-bucket/folder/conf1.py').is_file()\n    assert S3Path('/target-bucket/folder/make.bat').is_file()\n    assert S3Path('/target-bucket/folder/index.rst').is_file()\n    assert S3Path('/target-bucket/folder/Makefile').is_file()\n    assert S3Path('/target-bucket/folder/_templates/11conf.py').is_file()\n    assert S3Path('/target-bucket/folder/_build/22conf.py').is_file()\n    assert S3Path('/target-bucket/folder/_static/conf.py').is_file()\n\n\ndef test_replace_s3_to_s3(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/make.bat')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/index.rst')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/Makefile')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_templates/11conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_build/22conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_static/conf.py')\n    object_summary.put(Body=b'test data')\n\n    
s3.create_bucket(Bucket='target-bucket')\n\n    S3Path('/test-bucket/docs/conf.py').replace('/test-bucket/docs/conf1.py')\n    assert not S3Path('/test-bucket/docs/conf.py').exists()\n    assert S3Path('/test-bucket/docs/conf1.py').is_file()\n\n    path = S3Path('/test-bucket/docs/')\n    path.replace(S3Path('/target-bucket') / S3Path('folder'))\n    assert not path.exists()\n    assert S3Path('/target-bucket/folder/conf1.py').is_file()\n    assert S3Path('/target-bucket/folder/make.bat').is_file()\n    assert S3Path('/target-bucket/folder/index.rst').is_file()\n    assert S3Path('/target-bucket/folder/Makefile').is_file()\n    assert S3Path('/target-bucket/folder/_templates/11conf.py').is_file()\n    assert S3Path('/target-bucket/folder/_build/22conf.py').is_file()\n    assert S3Path('/target-bucket/folder/_static/conf.py').is_file()\n\n\ndef test_rmdir(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/make.bat')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/index.rst')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/Makefile')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_templates/11conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_build/22conf.py')\n    object_summary.put(Body=b'test data')\n    object_summary = s3.ObjectSummary('test-bucket', 'docs/_static/conf.py')\n    object_summary.put(Body=b'test data')\n\n    conf_path = S3Path('/test-bucket/docs/_templates')\n    assert conf_path.is_dir()\n    conf_path.rmdir()\n    assert not conf_path.exists()\n\n    path = S3Path('/test-bucket/docs/')\n    path.rmdir()\n    assert 
not path.exists()\n\n\ndef test_rmdir_can_remove_bucket(s3_mock):\n    s3 = boto3.resource('s3')\n    bucket = S3Path('/test-bucket/')\n    bucket.mkdir()\n    assert bucket.exists()\n    bucket.rmdir()\n    assert not bucket.exists()\n\n\ndef test_mkdir(s3_mock):\n    s3 = boto3.resource('s3')\n\n    S3Path('/test-bucket/').mkdir()\n\n    assert s3.Bucket('test-bucket') in s3.buckets.all()\n\n    S3Path('/test-bucket/').mkdir(exist_ok=True)\n\n    with pytest.raises(FileExistsError):\n        S3Path('/test-bucket/').mkdir(exist_ok=False)\n\n    with pytest.raises(FileNotFoundError):\n        S3Path('/test-second-bucket/test-directory/file.name').mkdir()\n\n    S3Path('/test-second-bucket/test-directory/file.name').mkdir(parents=True)\n\n    assert s3.Bucket('test-second-bucket') in s3.buckets.all()\n\n\ndef test_write_text(s3_mock):\n    s3 = boto3.resource('s3')\n\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'temp_key')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/temp_key')\n    data = path.read_text()\n    assert isinstance(data, str)\n\n    path.write_text(data)\n    assert path.read_text() == data\n\n\ndef test_write_bytes(s3_mock):\n    s3 = boto3.resource('s3')\n\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'temp_key')\n    object_summary.put(Body=b'test data')\n\n    path = S3Path('/test-bucket/temp_key')\n    data = path.read_bytes()\n    assert isinstance(data, bytes)\n\n    path.write_bytes(data)\n    assert path.read_bytes() == data\n\n\ndef test_unlink(s3_mock):\n    s3 = boto3.resource('s3')\n\n    s3.create_bucket(Bucket='test-bucket')\n    object_summary = s3.ObjectSummary('test-bucket', 'temp_key')\n    object_summary.put(Body=b'test data')\n    path = S3Path('/test-bucket/temp_key')\n    subdir_key = S3Path('/test-bucket/fake_folder/some_key')\n    subdir_key.write_text(\"some text\")\n    assert path.exists() 
is True\n    assert subdir_key.exists() is True\n    path.unlink()\n    assert path.exists() is False\n    with pytest.raises(FileNotFoundError):\n        S3Path(\"/test-bucket/fake_subfolder/fake_subkey\").unlink()\n    with pytest.raises(IsADirectoryError):\n        S3Path(\"/test-bucket/fake_folder\").unlink()\n    with pytest.raises(IsADirectoryError):\n        S3Path(\"/fake-bucket/\").unlink()\n\n    S3Path(\"/test-bucket/fake_subfolder/fake_subkey\").unlink(missing_ok=True)\n    S3Path(\"/test-bucket/fake_folder\").unlink(missing_ok=True)\n    S3Path(\"/fake-bucket/\").unlink(missing_ok=True)\n\n\ndef test_absolute(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    absolute_path = S3Path('/test-bucket/directory/Test.test')\n    assert absolute_path.absolute() is absolute_path\n\n    relative_path = S3Path('./Test.test')\n    with pytest.raises(ValueError):\n        relative_path.absolute()\n\n\ndef test_versioned_bucket(s3_mock):\n    bucket, key = 'test-versioned-bucket', 'versioned_file.txt'\n\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket=bucket)\n    s3.BucketVersioning(bucket).enable()\n\n    object_summary = s3.ObjectSummary(bucket, key)\n    file_contents_by_version = (b'Test', b'Test updated', b'Test', b'Test final')\n\n    version_id_to_file_content = {}\n    for file_content in file_contents_by_version:\n        version_id = object_summary.put(Body=file_content).get('VersionId')\n        version_id_to_file_content[version_id] = file_content\n\n    assert len(version_id_to_file_content) == len(file_contents_by_version)\n\n    # Test that we can read specific versions of the file\n    for version_id, expected_file_content in version_id_to_file_content.items():\n        versioned_paths = (\n            VersionedS3Path(f'/{bucket}/{key}', version_id=version_id),\n            VersionedS3Path(f'/{bucket}', f'{key}', version_id=version_id),\n            VersionedS3Path.from_uri(f's3://{bucket}/{key}', 
version_id=version_id),\n            VersionedS3Path.from_bucket_key(bucket=bucket, key=key, version_id=version_id),\n        )\n        for versioned_path in versioned_paths:\n            assert versioned_path.exists() and versioned_path.is_file()\n            assert versioned_path.stat().st_version_id == version_id\n            assert versioned_path.read_bytes() == expected_file_content\n\n    # Test that we receive the latest version of the file when S3Path is used or no version_id is specified\n    paths = (\n        S3Path(f'/{bucket}/{key}'),\n        S3Path(f'/{bucket}', f'{key}'),\n        S3Path.from_uri(f's3://{bucket}/{key}'),\n        S3Path.from_bucket_key(bucket=bucket, key=key),\n    )\n    for path in paths:\n        assert not isinstance(path, VersionedS3Path)\n        assert path.read_bytes() == file_contents_by_version[-1]\n\n\ndef test_buffered_copy(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    data = b'test data' * 10_000_000\n    source_path = S3Path('/test-bucket/source')\n    source_path.write_bytes(data)\n    target_path = S3Path('/test-bucket/target')\n    with source_path.open('rb') as source, target_path.open('wb') as target:\n        shutil.copyfileobj(source, target)\n    assert target_path.read_bytes() == data\n\n\n@pytest.mark.skipif(sys.version_info < (3, 12), reason=\"requires python 3.12 or higher\")\ndef test_walk(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    walk_test_results = [\n        (PosixPath('.'),\n         ['.pytest_cache', 'tests', 'docs', 's3path', '.github', '.git', 's3path.egg-info', '.idea'],\n         ['LICENSE', 'Makefile', 'MANIFEST.in', 'Pipfile', 'setup.py', '.gitignore', 'setup.cfg', 'README.rst',\n                   'Pipfile.lock']),\n        (PosixPath('.pytest_cache'), ['v'], ['CACHEDIR.TAG', 'README.md', '.gitignore']),\n        (PosixPath('.pytest_cache/v'), ['cache'], []),\n        
(PosixPath('.pytest_cache/v/cache'), [], ['nodeids', 'lastfailed', 'stepwise']),\n        (PosixPath('tests'), [],\n         ['test_not_supported.py', 'conftest.py', 'test_path_operations.py', '__init__.py',\n                   'test_s3path_configuration.py', 'test_pure_path_operations.py']),\n        (PosixPath('docs'), [],\n         ['advance.rst', 's3path_graph.jpg', 's3path_graph.svg', 'comparison.rst', 'interface.rst']),\n        (PosixPath('s3path'), [],\n         ['accessor.py', 'old_versions.py', '__init__.py', 'py.typed', 'current_version.py']),\n        (PosixPath('.github'), ['workflows'], []),\n        (PosixPath('.github/workflows'), [], ['deploying.yml', 'testing.yml']),\n        (PosixPath('.git'), ['objects', 'info', 'logs', 'hooks', 'refs'],\n         ['config', 'HEAD', 'description', 'index', 'packed-refs']),\n        (PosixPath('.git/objects'), ['pack'], []),\n        (PosixPath('.git/objects/pack'), [],\n         ['pack-746373b9d83ac407488288f60747a6de8ac71439.idx',\n                   'pack-746373b9d83ac407488288f60747a6de8ac71439.pack']),\n        (PosixPath('.git/info'), [], ['exclude']),\n        (PosixPath('.git/logs'), ['refs'], ['HEAD']),\n        (PosixPath('.git/logs/refs'), ['heads', 'remotes'], []),\n        (PosixPath('.git/logs/refs/heads'), [], ['master']),\n        (PosixPath('.git/logs/refs/remotes'), ['origin'], []),\n        (PosixPath('.git/logs/refs/remotes/origin'), [], ['HEAD']),\n        (PosixPath('.git/hooks'), [],\n         ['commit-msg.sample', 'pre-rebase.sample', 'pre-commit.sample', 'applypatch-msg.sample',\n                   'fsmonitor-watchman.sample', 'pre-receive.sample', 'prepare-commit-msg.sample', 'post-update.sample',\n                   'pre-merge-commit.sample', 'pre-applypatch.sample', 'pre-push.sample', 'update.sample',\n                   'push-to-checkout.sample']),\n        (PosixPath('.git/refs'), ['heads', 'remotes'], []),\n        (PosixPath('.git/refs/heads'), [], ['master']),\n        
(PosixPath('.git/refs/remotes'), ['origin'], []),\n        (PosixPath('.git/refs/remotes/origin'), [], ['HEAD']),\n        (PosixPath('s3path.egg-info'), [],\n         ['PKG-INFO', 'SOURCES.txt', 'requires.txt', 'top_level.txt', 'dependency_links.txt']),\n        (PosixPath('.idea'), ['inspectionProfiles'],\n         ['s3path.iml', 'vcs.xml', '.gitignore', 'workspace.xml', 'modules.xml', 'misc.xml']),\n        (PosixPath('.idea/inspectionProfiles'), [], ['profiles_settings.xml']),\n    ]\n    for path, directories, files in walk_test_results:\n        for file in files:\n            key = str(path / file)\n            object_summary = s3.ObjectSummary('test-bucket', key)\n            object_summary.put(Body=b'test data')\n\n    compare = {}\n    for (local_path, local_directories, local_files), (s3_path, s3_directories, s3_files) in zip(walk_test_results, S3Path('/test-bucket').walk()):\n        compare.setdefault(s3_path.key or '.', {})['s3'] = {'files': set(s3_files), 'directories': set(s3_directories)}\n        compare.setdefault(str(local_path), {})['local'] = {'files': set(local_files), 'directories': set(local_directories)}\n\n    for root, location in compare.items():\n        assert 's3' in location and 'local' in location\n        assert location['s3']['files'] == location['local']['files']\n        assert location['s3']['directories'] == location['local']['directories']\n\n\n@pytest.mark.skipif(sys.version_info < (3, 12), reason=\"requires python 3.12 or higher\")\ndef test_walk_order(s3_mock):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n    walk_test_results = [\n        (PosixPath('.'), ['.pytest_cache'], ['LICENSE', 'Makefile', 'setup.cfg', 'README.rst']),\n        (PosixPath('.pytest_cache'), ['v'], ['CACHEDIR.TAG', 'README.md', '.gitignore']),\n        (PosixPath('.pytest_cache/v'), ['cache'], []),\n        (PosixPath('.pytest_cache/v/cache'), [], ['nodeids', 'lastfailed', 'stepwise']),\n    ]\n    for path, 
directories, files in walk_test_results:\n        for file in files:\n            key = str(path / file)\n            object_summary = s3.ObjectSummary('test-bucket', key)\n            object_summary.put(Body=b'test data')\n\n    for (local_path, local_directories, local_files), (s3_path, s3_directories, s3_files) in zip(walk_test_results, S3Path('/test-bucket').walk()):\n        assert set(local_directories) == set(s3_directories)\n        assert set(local_files) == set(s3_files)\n\n    for (local_path, local_directories, local_files), (s3_path, s3_directories, s3_files) in zip(reversed(walk_test_results), S3Path('/test-bucket').walk(top_down=False)):\n        assert set(local_directories) == set(s3_directories)\n        assert set(local_files) == set(s3_files)\n\n    assert list(p for p in S3Path('/test-bucket/fake/').walk()) == []\n\n    def on_error(exception):\n        assert isinstance(exception, FileNotFoundError)\n        print(exception, '0'*30)\n        raise exception\n    with pytest.raises(FileNotFoundError):\n        for _ in S3Path('/test-bucket/fake/').walk(on_error=on_error):\n            pass\n"
  },
  {
    "path": "tests/test_pure_path_operations.py",
    "content": "import os\nimport pytest\nfrom pathlib import Path, PurePosixPath, PureWindowsPath\nfrom s3path import PureS3Path\n\n\ndef test_paths_of_a_different_flavour():\n    with pytest.raises(TypeError):\n        PureS3Path('/bucket/key') < PurePosixPath('/bucket/key')\n\n    with pytest.raises(TypeError):\n        PureWindowsPath('/bucket/key') > PureS3Path('/bucket/key')\n\n\ndef test_repr():\n    assert repr(PureS3Path('setup.py')) == \"PureS3Path('setup.py')\"\n    assert str(PureS3Path('setup.py')) == 'setup.py'\n    assert bytes(PureS3Path('setup.py')) == b'setup.py'\n    assert PureS3Path('/usr/bin').as_posix() == '/usr/bin'\n\n\ndef test_fspath():\n    assert os.fspath(PureS3Path('/usr/bin')) == '/usr/bin'\n\n\ndef test_from_uri_issue_150():\n    uri = 's3://bucket/test/2023-09-10T00%3A00%3A00.000Z.txt'\n    string = '/bucket/test/2023-09-10T00:00:00.000Z.txt'\n    path = PureS3Path.from_uri(uri)\n    assert path.as_uri() == uri\n    assert str(path) == string\n\n\ndef test_join_strs():\n    assert PureS3Path('foo', 'some/path', 'bar') == PureS3Path('foo/some/path/bar')\n\n\ndef test_join_paths():\n    assert PureS3Path(Path('foo'), Path('bar')) == PureS3Path('foo/bar')\n\n\ndef test_empty():\n    assert PureS3Path() == PureS3Path('.')\n\n\ndef test_absolute_paths():\n    assert PureS3Path('/etc', '/usr', 'lib64') == PureS3Path('/usr/lib64')\n\n\ndef test_slashes_single_double_dots():\n    assert PureS3Path('foo//bar') == PureS3Path('foo/bar')\n    assert PureS3Path('foo/./bar') == PureS3Path('foo/bar')\n    assert PureS3Path('foo/../bar') == PureS3Path('bar')\n    assert PureS3Path('../bar') == PureS3Path('../bar')\n    assert PureS3Path('foo', '../bar') == PureS3Path('bar')\n\n\ndef test_operators():\n    assert PureS3Path('/etc') / 'init.d' / 'apache2' == PureS3Path('/etc/init.d/apache2')\n    assert '/usr' / PureS3Path('bin') == PureS3Path('/usr/bin')\n\n\ndef test_parts():\n    assert PureS3Path('foo//bar').parts == ('foo', 'bar')\n    assert 
PureS3Path('foo/./bar').parts == ('foo', 'bar')\n    assert PureS3Path('foo/../bar').parts == ('bar',)\n    assert PureS3Path('../bar').parts == ('..', 'bar')\n    assert PureS3Path('foo', '../bar').parts == ('bar',)\n    assert PureS3Path('/foo/bar').parts == ('/', 'foo', 'bar')\n\n\n@pytest.mark.parametrize(\"path\", [\"/foo\", \"/foo/\"])\ndef test_is_bucket_with_valid_bucket_paths(path):\n    assert PureS3Path(path).is_bucket\n\n\n@pytest.mark.parametrize(\"path\", [\"//foo\", \"foo/\", \"foo\", \"\", \"/foo/bar\"])\ndef test_is_bucket_with_invalid_bucket_paths(path):\n    assert not PureS3Path(path).is_bucket\n\n\ndef test_drive():\n    assert PureS3Path('foo//bar').drive == ''\n    assert PureS3Path('foo/./bar').drive == ''\n    assert PureS3Path('foo/../bar').drive == ''\n    assert PureS3Path('../bar').drive == ''\n    assert PureS3Path('foo', '../bar').drive == ''\n    assert PureS3Path('/foo/bar').drive == ''\n\n\ndef test_root():\n    assert PureS3Path('foo//bar').root == ''\n    assert PureS3Path('foo/./bar').root == ''\n    assert PureS3Path('foo/../bar').root == ''\n    assert PureS3Path('../bar').root == ''\n    assert PureS3Path('foo', '../bar').root == ''\n    assert PureS3Path('/foo/bar').root == '/'\n\n\ndef test_anchor():\n    assert PureS3Path('foo//bar').anchor == ''\n    assert PureS3Path('foo/./bar').anchor == ''\n    assert PureS3Path('foo/../bar').anchor == ''\n    assert PureS3Path('../bar').anchor == ''\n    assert PureS3Path('foo', '../bar').anchor == ''\n    assert PureS3Path('/foo/bar').anchor == '/'\n\n\ndef test_parents():\n    assert tuple(PureS3Path('foo//bar').parents) == (PureS3Path('foo'), PureS3Path('.'))\n    assert tuple(PureS3Path('foo/./bar').parents) == (PureS3Path('foo'), PureS3Path('.'))\n    assert tuple(PureS3Path('foo/../bar').parents) == (PureS3Path('.'),)\n    assert tuple(PureS3Path('../bar').parents) == (PureS3Path('..'), PureS3Path('.'))\n    assert tuple(PureS3Path('foo', '../bar').parents) == 
(PureS3Path('.'),)\n    assert tuple(PureS3Path('/foo/bar').parents) == (PureS3Path('/foo'), PureS3Path('/'))\n\n\ndef test_parent():\n    assert PureS3Path('foo//bar').parent == PureS3Path('foo')\n    assert PureS3Path('foo/./bar').parent == PureS3Path('foo')\n    assert PureS3Path('foo/../bar').parent == PureS3Path('.')\n    assert PureS3Path('../bar').parent == PureS3Path('..')\n    assert PureS3Path('foo', '../bar').parent == PureS3Path('.')\n    assert PureS3Path('/foo/bar').parent == PureS3Path('/foo')\n    assert PureS3Path('.').parent == PureS3Path('.')\n    assert PureS3Path('/').parent == PureS3Path('/')\n\n\ndef test_name():\n    assert PureS3Path('my/library/setup.py').name == 'setup.py'\n\n\ndef test_suffix():\n    assert PureS3Path('my/library/setup.py').suffix == '.py'\n    assert PureS3Path('my/library.tar.gz').suffix == '.gz'\n    assert PureS3Path('my/library').suffix == ''\n\n\ndef test_suffixes():\n    assert PureS3Path('my/library.tar.gar').suffixes == ['.tar', '.gar']\n    assert PureS3Path('my/library.tar.gz').suffixes == ['.tar', '.gz']\n    assert PureS3Path('my/library').suffixes == []\n\n\ndef test_stem():\n    assert PureS3Path('my/library.tar.gar').stem == 'library.tar'\n    assert PureS3Path('my/library.tar').stem == 'library'\n    assert PureS3Path('my/library').stem == 'library'\n\n\ndef test_uri():\n    assert PureS3Path('/etc/passwd').as_uri() == 's3://etc/passwd'\n    assert PureS3Path('/etc/init.d/apache2').as_uri() == 's3://etc/init.d/apache2'\n    assert PureS3Path('/bucket/key').as_uri() == 's3://bucket/key'\n\n\ndef test_absolute():\n    assert PureS3Path('/a/b').is_absolute()\n    assert not PureS3Path('a/b').is_absolute()\n\n\ndef test_reserved():\n    assert not PureS3Path('/a/b').is_reserved()\n    assert not PureS3Path('a/b').is_reserved()\n\n\ndef test_joinpath():\n    assert PureS3Path('/etc').joinpath('passwd') == PureS3Path('/etc/passwd')\n    assert PureS3Path('/etc').joinpath(PureS3Path('passwd')) == 
PureS3Path('/etc/passwd')\n    assert PureS3Path('/etc').joinpath('init.d', 'apache2') == PureS3Path('/etc/init.d/apache2')\n\n\ndef test_match():\n    assert PureS3Path('a/b.py').match('*.py')\n    assert PureS3Path('/a/b/c.py').match('b/*.py')\n    assert not PureS3Path('/a/b/c.py').match('a/*.py')\n    assert PureS3Path('/a.py').match('/*.py')\n    assert not PureS3Path('a/b.py').match('/*.py')\n    assert not PureS3Path('a/b.py').match('*.Py')\n\n\ndef test_relative_to():\n    s3_path = PureS3Path('/etc/passwd')\n    assert s3_path.relative_to('/') == PureS3Path('etc/passwd')\n    assert s3_path.relative_to('/etc') == PureS3Path('passwd')\n    with pytest.raises(ValueError):\n        s3_path.relative_to('/usr')\n\n\ndef test_with_name():\n    s3_path = PureS3Path('/Downloads/pathlib.tar.gz')\n    assert s3_path.with_name('setup.py') == PureS3Path('/Downloads/setup.py')\n    s3_path = PureS3Path('/')\n    with pytest.raises(ValueError):\n        s3_path.with_name('setup.py')\n\n\ndef test_with_suffix():\n    s3_path = PureS3Path('/Downloads/pathlib.tar.gz')\n    assert s3_path.with_suffix('.bz2') == PureS3Path('/Downloads/pathlib.tar.bz2')\n    s3_path = PureS3Path('README')\n    assert s3_path.with_suffix('.txt') == PureS3Path('README.txt')\n    s3_path = PureS3Path('README.txt')\n    assert s3_path.with_suffix('') == PureS3Path('README')\n"
  },
  {
    "path": "tests/test_s3path_configuration.py",
    "content": "\nimport sys\nimport pytest\nimport smart_open\nfrom pathlib import Path\nfrom packaging.version import Version\n\nimport boto3\nfrom botocore.client import Config\n\nfrom s3path import S3Path, PureS3Path, register_configuration_parameter\n\n\nif sys.version_info >= (3, 12):\n    from s3path import accessor\n    _config_key_parser = str\nelse:\n    accessor = S3Path._accessor\n    _config_key_parser = lambda path: path\n\n\ndef test_s3_configuration_map_repr():\n    assert repr(accessor.configuration_map)\n\n\ndef test_basic_configuration(reset_configuration_cache):\n    path = S3Path('/foo/')\n\n    accessor.configuration_map.arguments = accessor.configuration_map.resources = None\n\n    assert _config_key_parser(path) not in (accessor.configuration_map.arguments or ())\n    assert _config_key_parser(path) not in (accessor.configuration_map.resources or ())\n    assert accessor.configuration_map.get_configuration(path) == (\n        accessor.configuration_map.default_resource, {})\n\n    assert (accessor.configuration_map.get_configuration(S3Path('/foo/'))\n            == accessor.configuration_map.get_configuration(PureS3Path('/foo/')))\n\n\ndef test_register_configuration_exceptions(reset_configuration_cache):\n    with pytest.raises(TypeError):\n        register_configuration_parameter(Path('/'), parameters={'ContentType': 'text/html'})\n\n    with pytest.raises(TypeError):\n        register_configuration_parameter(S3Path('/foo/'), parameters=('ContentType', 'text/html'))\n\n    with pytest.raises(ValueError):\n        register_configuration_parameter(S3Path('/foo/'))\n\n\ndef test_hierarchical_configuration(reset_configuration_cache):\n    path = S3Path('/foo/')\n    register_configuration_parameter(path, parameters={'ContentType': 'text/html'})\n    assert _config_key_parser(path) in accessor.configuration_map.arguments\n    assert _config_key_parser(path) not in accessor.configuration_map.resources\n    assert 
accessor.configuration_map.get_configuration(path) == (\n        accessor.configuration_map.default_resource, {'ContentType': 'text/html'})\n\n    assert (accessor.configuration_map.get_configuration(S3Path('/foo/'))\n            == accessor.configuration_map.get_configuration(PureS3Path('/foo/')))\n\n\ndef test_boto_methods_with_configuration(s3_mock, reset_configuration_cache):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='test-bucket')\n\n    bucket = S3Path('/test-bucket/')\n    register_configuration_parameter(bucket, parameters={'ContentType': 'text/html'})\n    key = bucket.joinpath('bar.html')\n    key.write_text('hello')\n\n\ndef test_configuration_per_bucket(reset_configuration_cache):\n    local_stack_bucket_path = PureS3Path('/LocalStackBucket/')\n    minio_bucket_path = PureS3Path('/MinIOBucket/')\n    default_aws_s3_path = PureS3Path('/')\n\n    register_configuration_parameter(\n        default_aws_s3_path,\n        parameters={'ContentType': 'text/html'})\n    register_configuration_parameter(\n        local_stack_bucket_path,\n        parameters={},\n        resource=boto3.resource('s3', endpoint_url='http://localhost:4566'))\n    register_configuration_parameter(\n        minio_bucket_path,\n        parameters={'OutputSerialization': {'CSV': {}}},\n        resource=boto3.resource(\n            's3',\n            endpoint_url='http://localhost:9000',\n            aws_access_key_id='minio',\n            aws_secret_access_key='minio123',\n            config=Config(signature_version='s3v4'),\n            region_name='us-east-1'))\n\n    assert accessor.configuration_map.get_configuration(PureS3Path('/')) == (\n        accessor.configuration_map.default_resource, {'ContentType': 'text/html'})\n    assert accessor.configuration_map.get_configuration(PureS3Path('/some_bucket')) == (\n        accessor.configuration_map.default_resource, {'ContentType': 'text/html'})\n    assert 
accessor.configuration_map.get_configuration(PureS3Path('/some_bucket')) == (\n        accessor.configuration_map.default_resource, {'ContentType': 'text/html'})\n\n    resources, arguments = accessor.configuration_map.get_configuration(minio_bucket_path)\n    assert arguments == {'OutputSerialization': {'CSV': {}}}\n    assert resources.meta.client._endpoint.host == 'http://localhost:9000'\n\n    resources, arguments = accessor.configuration_map.get_configuration(minio_bucket_path / 'some_key')\n    assert arguments == {'OutputSerialization': {'CSV': {}}}\n    assert resources.meta.client._endpoint.host == 'http://localhost:9000'\n\n    resources, arguments = accessor.configuration_map.get_configuration(local_stack_bucket_path)\n    assert arguments == {}\n    assert resources.meta.client._endpoint.host == 'http://localhost:4566'\n\n    resources, arguments = accessor.configuration_map.get_configuration(local_stack_bucket_path / 'some_key')\n    assert arguments == {}\n    assert resources.meta.client._endpoint.host == 'http://localhost:4566'\n\n\ndef test_open_method_with_custom_endpoint_url(s3_mock, reset_configuration_cache, monkeypatch):\n    s3 = boto3.resource('s3')\n    s3.create_bucket(Bucket='my-bucket')\n    monkeypatch.setattr(S3Path, 'exists', lambda self: True)\n\n    local_path = PureS3Path('/local/')\n    register_configuration_parameter(\n        local_path,\n        parameters={},\n        resource=boto3.resource('s3', endpoint_url='http://localhost'))\n\n    file_object = S3Path('/local/directory/Test.test').open('br')\n    if Version(smart_open.__version__) <= Version('3.0.0'):\n        assert file_object._object.meta.client._endpoint.host == 'http://localhost'\n    else:\n        assert file_object._client.client._endpoint.host == 'http://localhost'\n\n\ndef test_issue_123():\n    path = S3Path('/bucket')\n    old_resource, _ = accessor.configuration_map.get_configuration(path)\n\n    boto3.setup_default_session()\n    s3 = 
boto3.resource('s3')\n    register_configuration_parameter(path, resource=s3)\n\n    new_resource, _ = accessor.configuration_map.get_configuration(path)\n    assert new_resource is s3\n    assert new_resource is not old_resource\n"
  }
]